From 5638fd186893b697f763799209ab697e22da91d0 Mon Sep 17 00:00:00 2001 From: Raman Saparkhan Date: Tue, 5 Sep 2023 23:49:06 +0300 Subject: [PATCH 01/80] Added Guide for Snapshot API(snapshot.md) Signed-off-by: Raman Saparkhan Signed-off-by: roma2023 --- USER_GUIDE.md | 1 + guides/snapshot.md | 108 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 109 insertions(+) create mode 100644 guides/snapshot.md diff --git a/USER_GUIDE.md b/USER_GUIDE.md index 4b6b89c2..cba0b5ef 100644 --- a/USER_GUIDE.md +++ b/USER_GUIDE.md @@ -153,6 +153,7 @@ print(response) - [Search](guides/search.md) - [Point in Time](guides/point_in_time.md) - [Using a Proxy](guides/proxy.md) +- [Taking a Snapshot](guides/snapshot.md) ## Plugins diff --git a/guides/snapshot.md b/guides/snapshot.md new file mode 100644 index 00000000..cd5fc129 --- /dev/null +++ b/guides/snapshot.md @@ -0,0 +1,108 @@ +# Snapshot Actions +In this guide, we will look at some snapshot actions that allow you to manage and work with snapshots of your indices. + + +## Setup +Let's create a client instance, and an index named `movies`: +```python +from opensearchpy import OpenSearch + +client = OpenSearch( + hosts=['https://admin:admin@localhost:9200'], + use_ssl=True, + verify_certs=False +) + +print(client.info()) # Check server info and make sure the client is connected +client.indices.create(index='movies') +``` +## API Actions +### Create Snapshot Repository +Before taking a snapshot, you need to create a snapshot repository to store the snapshots. You can use the `create_repository` API action for this purpose. The following example creates a snapshot repository named `my_repository`: + +```python +repo_body = { + "type": "fs", # Replace 'fs' with the appropriate repository type + "settings": { + "location": "/path/to/repo", + } +} +client.snapshot.create_repository(repository='my_repository', body=repo_body) +``` + +### Create Snapshot +To create a snapshot of an index, you can use the `create` method from the `snapshot` API. The following example creates a snapshot named `my_snapshot` for the movies index: + +```python +client.snapshot.create(repository='my_repository', snapshot='my_snapshot', body={"indices": "movies"}) +``` + +### Verify Snapshot Repository +The `verify_repository` API action allows you to verify a snapshot repository. Verifying a repository ensures that it is accessible and operational, but it does not validate the integrity of the snapshots stored within the repository. The following example verifies `my_repository`: + +```python +response = client.snapshot.verify_repository(repository='my_repository') +``` + +### Delete Snapshot +To delete a specific snapshot, use the `delete` API action: + +```python +client.snapshot.delete(repository='my_repository', snapshot='my_snapshot') +``` +### Restore Snapshot +To restore a snapshot and recreate the indices, mappings, and data, you can use the `restore` method. The following example restores the `my_snapshot` snapshot: + +```python +response = client.snapshot.restore(repository='my_repository', snapshot='my_snapshot') +``` + +### Get Snapshot Status +To check the status of a snapshot, you can use the `status` method. + +```python +response = client.snapshot.status(repository='my_repository', snapshot='my_snapshot') +``` + +### Clone Snapshot +You can clone an existing snapshot to create a new snapshot with the same contents. 
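+The clone is created in the same repository as the source snapshot, and the target snapshot name must not already exist in that repository.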
The `clone` operation allows you to create multiple copies of a snapshot, which can be useful for backup retention or creating snapshots for different purposes. The following example clones a snapshot named `my_snapshot` to create a new snapshot named `my_snapshot_clone`: + +```python +client.snapshot.clone( + repository='my_repository', + snapshot='my_snapshot', + target_snapshot='my_snapshot_clone' +) +``` +## Get Snapshot +To retrieve information about a specific snapshot, you can use the `get` API action. It provides metadata such as the snapshot's status, indices included in the snapshot, and the timestamp when the snapshot was taken. The following example retrieves information about the `my_snapshot`: + +```python +response = client.snapshot.get( + repository='my_repository', + snapshot='my_snapshot' +) +``` + +## Get Repository +To retrieve information about a snapshot repository, you can use the `get_repository` API action. It provides details about the configured repository, including its type and settings. The following example retrieves information about the `my_repository`: + +```python +response = client.snapshot.get_repository(repository='my_repository') +``` + +## Repository Analyze +The `repository_analyze` API action allows you to analyze a snapshot repository for correctness and performance. It checks for any inconsistencies or corruption in the repository. The following example performs a repository analysis on `my_repository`: + +```python +response = client.snapshot.repository_analyze(repository='my_repository') +``` + +## Cleanup + +Finally, let's delete the `movies` index and clean up all the snapshots and the repository: +```python +client.indices.delete(index='movies') +client.snapshot.delete(repository='my_repository', snapshot='my_snapshot') +client.snapshot.delete_repository(repository='my_repository') +``` \ No newline at end of file From 1603ff9e977ddfb590f78e13b932b76f4a1e137c Mon Sep 17 00:00:00 2001 From: Raman Saparkhan Date: Wed, 6 Sep 2023 00:17:30 +0300 Subject: [PATCH 02/80] Updated CHANGELOG.md and applied working sample on snapshot.md Signed-off-by: Raman Saparkhan Signed-off-by: roma2023 --- CHANGELOG.md | 1 + guides/snapshot.md | 43 +++++++++++++++++++++++++++++++++++-------- 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 040fb8ab..0c26f7bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,6 +42,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Improved CI performance of integration with unreleased OpenSearch ([#318](https://github.com/opensearch-project/opensearch-py/pull/318)) - Added k-NN guide and samples ([#449](https://github.com/opensearch-project/opensearch-py/pull/449)) - Added the ability to run tests matching a pattern to `.ci/run-tests` ([#454](https://github.com/opensearch-project/opensearch-py/pull/454)) +- Added new guide: `snapshot.md` for Snapshot API. 
([#486](https://github.com/opensearch-project/opensearch-py/pull/429)) ### Changed - Moved security from `plugins` to `clients` ([#442](https://github.com/opensearch-project/opensearch-py/pull/442)) - Updated Security Client APIs ([#450](https://github.com/opensearch-project/opensearch-py/pull/450)) diff --git a/guides/snapshot.md b/guides/snapshot.md index cd5fc129..2b669d39 100644 --- a/guides/snapshot.md +++ b/guides/snapshot.md @@ -3,18 +3,45 @@ In this guide, we will look at some snapshot actions that allow you to manage an ## Setup -Let's create a client instance, and an index named `movies`: +Let's create a client instance, and an index named `test-index`: ```python from opensearchpy import OpenSearch +# connect to OpenSearch + +host = 'localhost' +port = 9200 +auth = ('admin', 'admin') # For testing only. Don't store credentials in code. + client = OpenSearch( - hosts=['https://admin:admin@localhost:9200'], - use_ssl=True, - verify_certs=False + hosts = [{'host': host, 'port': port}], + http_auth = auth, + use_ssl = True, + verify_certs = False, + ssl_show_warn = False +) + +info = client.info() +print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + +# create an index + +index_name = 'test-index' + +index_body = { + 'settings': { + 'index': { + 'number_of_shards': 4 + } + } +} + +response = client.indices.create( + index_name, + body=index_body ) print(client.info()) # Check server info and make sure the client is connected -client.indices.create(index='movies') ``` ## API Actions ### Create Snapshot Repository @@ -34,7 +61,7 @@ client.snapshot.create_repository(repository='my_repository', body=repo_body) To create a snapshot of an index, you can use the `create` method from the `snapshot` API. The following example creates a snapshot named `my_snapshot` for the movies index: ```python -client.snapshot.create(repository='my_repository', snapshot='my_snapshot', body={"indices": "movies"}) +client.snapshot.create(repository='my_repository', snapshot='my_snapshot', body={"indices": "test-index"}) ``` ### Verify Snapshot Repository @@ -100,9 +127,9 @@ response = client.snapshot.repository_analyze(repository='my_repository') ## Cleanup -Finally, let's delete the `movies` index and clean up all the snapshots and the repository: +Finally, let's delete the `test-index` index and clean up all the snapshots and the repository: ```python -client.indices.delete(index='movies') +client.indices.delete(index='test-index') client.snapshot.delete(repository='my_repository', snapshot='my_snapshot') client.snapshot.delete_repository(repository='my_repository') ``` \ No newline at end of file From 589bc1db38b366430b38419cbd359fa782e669e1 Mon Sep 17 00:00:00 2001 From: Raman Saparkhan Date: Wed, 6 Sep 2023 00:28:26 +0300 Subject: [PATCH 03/80] fixed integration test fail Signed-off-by: Raman Saparkhan Signed-off-by: roma2023 --- guides/snapshot.md | 43 ++++++++----------------------------------- 1 file changed, 8 insertions(+), 35 deletions(-) diff --git a/guides/snapshot.md b/guides/snapshot.md index 2b669d39..cd5fc129 100644 --- a/guides/snapshot.md +++ b/guides/snapshot.md @@ -3,45 +3,18 @@ In this guide, we will look at some snapshot actions that allow you to manage an ## Setup -Let's create a client instance, and an index named `test-index`: +Let's create a client instance, and an index named `movies`: ```python from opensearchpy import OpenSearch -# connect to OpenSearch - -host = 'localhost' -port = 9200 -auth = ('admin', 'admin') # For testing only. 
Don't store credentials in code. - client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False -) - -info = client.info() -print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") - -# create an index - -index_name = 'test-index' - -index_body = { - 'settings': { - 'index': { - 'number_of_shards': 4 - } - } -} - -response = client.indices.create( - index_name, - body=index_body + hosts=['https://admin:admin@localhost:9200'], + use_ssl=True, + verify_certs=False ) print(client.info()) # Check server info and make sure the client is connected +client.indices.create(index='movies') ``` ## API Actions ### Create Snapshot Repository @@ -61,7 +34,7 @@ client.snapshot.create_repository(repository='my_repository', body=repo_body) To create a snapshot of an index, you can use the `create` method from the `snapshot` API. The following example creates a snapshot named `my_snapshot` for the movies index: ```python -client.snapshot.create(repository='my_repository', snapshot='my_snapshot', body={"indices": "test-index"}) +client.snapshot.create(repository='my_repository', snapshot='my_snapshot', body={"indices": "movies"}) ``` ### Verify Snapshot Repository @@ -127,9 +100,9 @@ response = client.snapshot.repository_analyze(repository='my_repository') ## Cleanup -Finally, let's delete the `test-index` index and clean up all the snapshots and the repository: +Finally, let's delete the `movies` index and clean up all the snapshots and the repository: ```python -client.indices.delete(index='test-index') +client.indices.delete(index='movies') client.snapshot.delete(repository='my_repository', snapshot='my_snapshot') client.snapshot.delete_repository(repository='my_repository') ``` \ No newline at end of file From 14cca93df739c9b74653e57cf40e0e8324da54da Mon Sep 17 00:00:00 2001 From: Raman Saparkhan Date: Thu, 7 Sep 2023 23:23:54 +0300 Subject: [PATCH 04/80] added snapshot sample and updated snapshot.md Signed-off-by: Raman Saparkhan Signed-off-by: roma2023 --- CHANGELOG.md | 2 +- guides/snapshot.md | 44 ++++++++++++++++++-- samples/snapshot/snapshot_sample.py | 63 +++++++++++++++++++++++++++++ 3 files changed, 104 insertions(+), 5 deletions(-) create mode 100644 samples/snapshot/snapshot_sample.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c26f7bb..15d3969e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,7 +42,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Improved CI performance of integration with unreleased OpenSearch ([#318](https://github.com/opensearch-project/opensearch-py/pull/318)) - Added k-NN guide and samples ([#449](https://github.com/opensearch-project/opensearch-py/pull/449)) - Added the ability to run tests matching a pattern to `.ci/run-tests` ([#454](https://github.com/opensearch-project/opensearch-py/pull/454)) -- Added new guide: `snapshot.md` for Snapshot API. 
([#486](https://github.com/opensearch-project/opensearch-py/pull/429)) +- Added a guide for taking snapshots ([#486](https://github.com/opensearch-project/opensearch-py/pull/429)) ### Changed - Moved security from `plugins` to `clients` ([#442](https://github.com/opensearch-project/opensearch-py/pull/442)) - Updated Security Client APIs ([#450](https://github.com/opensearch-project/opensearch-py/pull/450)) diff --git a/guides/snapshot.md b/guides/snapshot.md index cd5fc129..64e1e24f 100644 --- a/guides/snapshot.md +++ b/guides/snapshot.md @@ -1,3 +1,19 @@ +# Table of Contents +- [Snapshot Actions](#snapshot-actions) + - [Setup](#setup) + - [API Actions](#api-actions) + - [Create Snapshot Repository](#create-snapshot-repository) + - [Create Snapshot](#create-snapshot) + - [Verify Snapshot Repository](#verify-snapshot-repository) + - [Delete Snapshot](#delete-snapshot) + - [Restore Snapshot](#restore-snapshot) + - [Get Snapshot Status](#get-snapshot-status) + - [Clone Snapshot](#clone-snapshot) + - [Get Snapshot](#get-snapshot) + - [Get Repository](#get-repository) + - [Repository Analyze](#repository-analyze) + - [Cleanup](#cleanup) + # Snapshot Actions In this guide, we will look at some snapshot actions that allow you to manage and work with snapshots of your indices. @@ -7,10 +23,16 @@ Let's create a client instance, and an index named `movies`: ```python from opensearchpy import OpenSearch +host = 'localhost' +port = 9200 +auth = ('admin', 'admin') # For testing only. Don't store credentials in code. + client = OpenSearch( - hosts=['https://admin:admin@localhost:9200'], - use_ssl=True, - verify_certs=False + hosts = [{'host': host, 'port': port}], + http_auth = auth, + use_ssl = True, + verify_certs = False, + ssl_show_warn = False ) print(client.info()) # Check server info and make sure the client is connected @@ -27,7 +49,12 @@ repo_body = { "location": "/path/to/repo", } } -client.snapshot.create_repository(repository='my_repository', body=repo_body) + +# Create the snapshot repository and capture the response +response = client.snapshot.create_repository(repository='my_repository', body=repo_body) + +# Print the response to see the result +print(response) ``` ### Create Snapshot @@ -42,6 +69,12 @@ The `verify_repository` API action allows you to verify a snapshot repository. V ```python response = client.snapshot.verify_repository(repository='my_repository') + +# Print the HTTP status code +print("HTTP Status Code:", response.status_code) + +# Print the response content +print("Response Content:", response.content) ``` ### Delete Snapshot @@ -82,6 +115,9 @@ response = client.snapshot.get( repository='my_repository', snapshot='my_snapshot' ) + +# Print the response to see the result +print(response) ``` ## Get Repository diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py new file mode 100644 index 00000000..a1f4ee03 --- /dev/null +++ b/samples/snapshot/snapshot_sample.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +from opensearchpy import OpenSearch + +# connect to OpenSearch + +host = 'localhost' +port = 9200 +auth = ('admin', 'admin') # For testing only. Don't store credentials in code. 
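+# As an illustration only: the credentials could instead come from the environment,
+# e.g. auth = ('admin', os.environ['OPENSEARCH_PASSWORD']) (requires `import os`;
+# the variable name here is just an example).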
+ +client = OpenSearch( + hosts = [{'host': host, 'port': port}], + http_auth = auth, + use_ssl = True, + verify_certs = False, + ssl_show_warn = False +) + +# Create an index + +index_name = "test-snapshot" +client.indices.create(index = index_name) + +# Create a snapshot repository + +repo_body = { + "type": "fs", # Replace 'fs' with the appropriate repository type + "settings": { + "location": "/path/to/repo", # Replace with the desired repository location + } +} + +repository_name = 'my_repository' +response = client.snapshot.create_repository(repository = repository_name, body = repo_body) + +print(response) + +# Create a snapshot + +snapshot_name = 'my_snapshot' +response = client.snapshot.create(repository = repository_name, snapshot = snapshot_name, body={"indices": index_name}) + +print(response) + +# Get Snapshot Information + +snapshot_info = client.snapshot.get(repository = repository_name, snapshot = snapshot_name) + +print(snapshot_info) + +# Clean up - Delete Snapshot and Repository + +client.snapshot.delete(repository = repository_name, snapshot = snapshot_name) +client.snapshot.delete_repository(repository = repository_name) + +# Clean up - Delete Index + +client.indices.delete(index = index_name) From d63c3580af7575570e9b01d4fc194e8068e92bfb Mon Sep 17 00:00:00 2001 From: Raman Saparkhan Date: Thu, 14 Sep 2023 01:20:37 +0300 Subject: [PATCH 05/80] Added temporary directory to snapshot sample Signed-off-by: Raman Saparkhan Signed-off-by: roma2023 --- samples/snapshot/snapshot_sample.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py index a1f4ee03..72956a09 100644 --- a/samples/snapshot/snapshot_sample.py +++ b/samples/snapshot/snapshot_sample.py @@ -5,6 +5,7 @@ # this file be licensed under the Apache-2.0 license or a # compatible open source license. 
+import tempfile from opensearchpy import OpenSearch # connect to OpenSearch @@ -26,12 +27,15 @@ index_name = "test-snapshot" client.indices.create(index = index_name) -# Create a snapshot repository +# Create a temporary directory for the snapshot repository +temp_repo = tempfile.TemporaryDirectory() +temp_repo_location = temp_repo.name # Get the path of the temporary directory +# Define the repository body with the temporary location repo_body = { "type": "fs", # Replace 'fs' with the appropriate repository type "settings": { - "location": "/path/to/repo", # Replace with the desired repository location + "location": temp_repo_location, # Replace with the desired repository location } } From 199aa5d94df97d2d8c4f5a77b2e038a595116244 Mon Sep 17 00:00:00 2001 From: Sultan Turan Date: Thu, 14 Dec 2023 13:42:08 +0600 Subject: [PATCH 06/80] add Dockerfile, change temp_repo_location Signed-off-by: Raman Saparkhan Signed-off-by: roma2023 --- samples/Dockerfile | 9 +++++++++ samples/snapshot/snapshot_sample.py | 30 ++++++++++++++--------------- 2 files changed, 24 insertions(+), 15 deletions(-) create mode 100644 samples/Dockerfile diff --git a/samples/Dockerfile b/samples/Dockerfile new file mode 100644 index 00000000..92a9befe --- /dev/null +++ b/samples/Dockerfile @@ -0,0 +1,9 @@ +FROM opensearchproject/opensearch:2.11.0 + +ARG OPENSEARCH_HOME=/usr/share/opensearch +ARG UID=1000 +ARG GID=1000 + +RUN echo 'path.repo: ["/usr/share/opensearch/backups"]' >> $OPENSEARCH_HOME/config/opensearch.yml +RUN mkdir -p $OPENSEARCH_HOME/backups +RUN chown -Rv $UID:$GID $OPENSEARCH_HOME/backups \ No newline at end of file diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py index 72956a09..ac4e85bb 100644 --- a/samples/snapshot/snapshot_sample.py +++ b/samples/snapshot/snapshot_sample.py @@ -12,24 +12,24 @@ host = 'localhost' port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ('admin', 'admin') # For testing only. Don't store credentials in code. 
client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{'host': host, 'port': port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False ) -# Create an index +# Create an index index_name = "test-snapshot" -client.indices.create(index = index_name) +client.indices.create(index=index_name) # Create a temporary directory for the snapshot repository temp_repo = tempfile.TemporaryDirectory() -temp_repo_location = temp_repo.name # Get the path of the temporary directory +temp_repo_location = "/usr/share/opensearch/backups" # Define the repository body with the temporary location repo_body = { @@ -40,28 +40,28 @@ } repository_name = 'my_repository' -response = client.snapshot.create_repository(repository = repository_name, body = repo_body) +response = client.snapshot.create_repository(repository=repository_name, body=repo_body) print(response) # Create a snapshot snapshot_name = 'my_snapshot' -response = client.snapshot.create(repository = repository_name, snapshot = snapshot_name, body={"indices": index_name}) +response = client.snapshot.create(repository=repository_name, snapshot=snapshot_name, body={"indices": index_name}) print(response) # Get Snapshot Information -snapshot_info = client.snapshot.get(repository = repository_name, snapshot = snapshot_name) +snapshot_info = client.snapshot.get(repository=repository_name, snapshot=snapshot_name) print(snapshot_info) # Clean up - Delete Snapshot and Repository -client.snapshot.delete(repository = repository_name, snapshot = snapshot_name) -client.snapshot.delete_repository(repository = repository_name) +client.snapshot.delete(repository=repository_name, snapshot=snapshot_name) +client.snapshot.delete_repository(repository=repository_name) # Clean up - Delete Index -client.indices.delete(index = index_name) +client.indices.delete(index=index_name) From 31aa5f82c935ea11d89e386e2b471384c3038ed4 Mon Sep 17 00:00:00 2001 From: roma2023 Date: Wed, 27 Dec 2023 15:40:02 +0600 Subject: [PATCH 07/80] update USER_GUIDE.md Signed-off-by: roma2023 --- USER_GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/USER_GUIDE.md b/USER_GUIDE.md index cba0b5ef..95ed6f11 100644 --- a/USER_GUIDE.md +++ b/USER_GUIDE.md @@ -153,7 +153,7 @@ print(response) - [Search](guides/search.md) - [Point in Time](guides/point_in_time.md) - [Using a Proxy](guides/proxy.md) -- [Taking a Snapshot](guides/snapshot.md) +- [Working with Snapshots](guides/snapshot.md) ## Plugins From 4ad0c5f0842ac717bc4a420290273e4031324a03 Mon Sep 17 00:00:00 2001 From: Raman Saparkhan <109868980+roma2023@users.noreply.github.com> Date: Fri, 15 Sep 2023 10:07:52 +0300 Subject: [PATCH 08/80] [CCI][GUIDE] Minor fixes to poetry docs (#494) * minor fixes to poetry docs Signed-off-by: Raman Saparkhan * updated CHANGELOG.md Signed-off-by: Raman Saparkhan * updated CHANGELOG.md Signed-off-by: Raman Saparkhan * fixed CHANGELOG.md Signed-off-by: Raman Saparkhan --------- Signed-off-by: Raman Saparkhan Signed-off-by: roma2023 --- CHANGELOG.md | 1 + guides/plugins/knn.md | 6 +++--- samples/README.md | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 15d3969e..d758f4e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,6 +57,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Include parsed error info in `TransportError` in async connections 
([#226](https://github.com/opensearch-project/opensearch-py/pull/226)) - Enhanced existing API generator to use OpenSearch OpenAPI spec ([#412](https://github.com/opensearch-project/opensearch-py/pull/412)) - Fix crash when attempting to authenticate with an async connection ([#424](https://github.com/opensearch-project/opensearch-py/pull/424)) +- Fixed poetry run command issue on Windows/Mac machines ([#494](https://github.com/opensearch-project/opensearch-py/pull/494)) ### Security - Fixed CVE-2022-23491 reported in opensearch-dsl-py ([#295](https://github.com/opensearch-project/opensearch-py/pull/295)) ### Dependencies diff --git a/guides/plugins/knn.md b/guides/plugins/knn.md index 7a3e6977..a7775c88 100644 --- a/guides/plugins/knn.md +++ b/guides/plugins/knn.md @@ -15,7 +15,7 @@ Short for k-nearest neighbors, the k-NN plugin enables users to search for the k In the following example we create a 5-dimensional k-NN index with random data. You can find a synchronous version of this working sample in [samples/knn/knn-basics.py](../../samples/knn/knn-basics.py) and an asynchronous one in [samples/knn/knn-async-basics.py](../../samples/knn/knn-async-basics.py). ```bash -$ poetry run knn/knn-basics.py +$ poetry run python knn/knn-basics.py Searching for [0.61, 0.05, 0.16, 0.75, 0.49] ... {'_index': 'my-index', '_id': '3', '_score': 0.9252405, '_source': {'values': [0.64, 0.3, 0.27, 0.68, 0.51]}} @@ -96,7 +96,7 @@ for hit in results["hits"]["hits"]: In [the boolean-filter.py sample](../../samples/knn/knn-boolean-filter.py) we create a 5-dimensional k-NN index with random data and a `metadata` field that contains a book genre (e.g. `fiction`). The search query is a k-NN search filtered by genre. The filter clause is outside the k-NN query clause and is applied after the k-NN search. ```bash -$ poetry run knn/knn-boolean-filter.py +$ poetry run python knn/knn-boolean-filter.py Searching for [0.08, 0.42, 0.04, 0.76, 0.41] with the 'romance' genre ... @@ -109,7 +109,7 @@ Searching for [0.08, 0.42, 0.04, 0.76, 0.41] with the 'romance' genre ... In [the lucene-filter.py sample](../../samples/knn/knn-efficient-filter.py) we implement the example in [the k-NN documentation](https://opensearch.org/docs/latest/search-plugins/knn/filter-search-knn/), which creates an index that uses the Lucene engine and HNSW as the method in the mapping, containing hotel location and parking data, then search for the top three hotels near the location with the coordinates `[5, 4]` that are rated between 8 and 10, inclusive, and provide parking. ```bash -$ poetry run knn/knn-efficient-filter.py +$ poetry run python knn/knn-efficient-filter.py {'_index': 'hotels-index', '_id': '3', '_score': 0.72992706, '_source': {'location': [4.9, 3.4], 'parking': 'true', 'rating': 9}} {'_index': 'hotels-index', '_id': '6', '_score': 0.3012048, '_source': {'location': [6.4, 3.4], 'parking': 'true', 'rating': 9}} diff --git a/samples/README.md b/samples/README.md index ad431cd8..b6e72f2c 100644 --- a/samples/README.md +++ b/samples/README.md @@ -15,5 +15,5 @@ Install [poetry](https://python-poetry.org/docs/). ``` poetry install -poetry run hello/hello.py +poetry run python hello/hello.py ``` From ab2eaf03e8da14f6c2cdaa4cbed5d5febe88f421 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 15 Sep 2023 14:51:55 -0400 Subject: [PATCH 09/80] Fix: typo. 
(#497) Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 4b672604..3287c11b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,7 +16,7 @@ jobs: - { os: 'ubuntu-latest', python-version: "3.11" } - { os: 'macos-latest', python-version: "3.11" } - name: test (ruby=${{ matrix.entry.os }}, python=${{ matrix.entry.python-version }}) + name: test (os=${{ matrix.entry.os }}, python=${{ matrix.entry.python-version }}) continue-on-error: ${{ matrix.entry.experimental || false }} runs-on: ${{ matrix.entry.os }} steps: From a4036a77fae5db789d3d1989316155c926a214fe Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Tue, 26 Sep 2023 09:46:18 -0700 Subject: [PATCH 10/80] Updated APIs to match other clients and opensearch openapi spec (#502) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 2 + opensearchpy/_async/client/__init__.py | 89 +++++++----- opensearchpy/_async/client/__init__.pyi | 35 +++-- opensearchpy/_async/client/_patch.py | 135 ++++++++++++++++++ opensearchpy/_async/client/_patch.pyi | 70 +++++++++ opensearchpy/_async/client/security.py | 19 +-- opensearchpy/_async/client/security.pyi | 39 ++++- opensearchpy/client/__init__.py | 88 +++++++----- opensearchpy/client/__init__.pyi | 35 +++-- opensearchpy/client/_patch.py | 133 +++++++++++++++++ opensearchpy/client/_patch.pyi | 69 +++++++++ opensearchpy/client/security.py | 19 +-- opensearchpy/client/security.pyi | 43 ++++-- .../test_security_plugin.py | 52 +++++++ .../test_client/test_point_in_time.py | 17 +++ .../test_security_plugin.py | 52 +++++++ utils/generate-api.py | 16 ++- 17 files changed, 786 insertions(+), 127 deletions(-) create mode 100644 opensearchpy/_async/client/_patch.py create mode 100644 opensearchpy/_async/client/_patch.pyi create mode 100644 opensearchpy/client/_patch.py create mode 100644 opensearchpy/client/_patch.pyi diff --git a/CHANGELOG.md b/CHANGELOG.md index d758f4e0..08306124 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,8 +4,10 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] ### Added - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) +- Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Changed ### Deprecated +- Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed ### Fixed ### Security diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 57f56b0f..2440b291 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -172,6 +172,12 @@ def default(self, obj): """ + from ._patch import ( + create_point_in_time, + delete_point_in_time, + list_all_point_in_time, + ) + def __init__(self, hosts=None, transport_class=AsyncTransport, **kwargs): """ :arg hosts: list of nodes, or a single node, we should connect to. 
@@ -1955,64 +1961,73 @@ async def get_script_languages(self, params=None, headers=None): "GET", "/_script_language", params=params, headers=headers ) - @query_params() - async def list_all_point_in_time(self, params=None, headers=None): + @query_params( + "allow_partial_pit_creation", + "expand_wildcards", + "keep_alive", + "preference", + "routing", + ) + async def create_pit(self, index, params=None, headers=None): """ - Returns the list of point in times which are alive + Creates point in time context. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_partial_pit_creation: Allow if point in time can be + created with partial failures. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg keep_alive: Specify the keep alive for point in time. + :arg preference: Specify the node or shard the operation should + be performed on. + :arg routing: Comma-separated list of specific routing values. """ + if index in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'index'.") + return await self.transport.perform_request( - "GET", - _make_path("_search", "point_in_time", "_all"), + "POST", + _make_path(index, "_search", "point_in_time"), params=params, headers=headers, ) @query_params() - async def delete_point_in_time( - self, body=None, all=False, params=None, headers=None - ): + async def delete_all_pits(self, params=None, headers=None): """ - Delete a point in time - + Deletes all active point in time searches. - :arg body: a point-in-time id to delete - :arg all: set it to `True` to delete all alive point in time. """ - path = ( - _make_path("_search", "point_in_time", "_all") - if all - else _make_path("_search", "point_in_time") - ) return await self.transport.perform_request( - "DELETE", path, params=params, headers=headers, body=body + "DELETE", "/_search/point_in_time/_all", params=params, headers=headers ) - @query_params( - "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" - ) - async def create_point_in_time(self, index=None, params=None, headers=None): + @query_params() + async def delete_pit(self, body=None, params=None, headers=None): """ - Create a point in time that can be used in subsequent searches + Deletes one or more point in time searches based on the IDs passed. - :arg index: A comma-separated list of index names to create point - in time; use `_all` or empty string to perform the operation on all - indices - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg keep_alive: Specific the time to live for the point in time - :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + :arg body: a point-in-time id to delete """ return await self.transport.perform_request( - "POST", - _make_path(index, "_search", "point_in_time"), + "DELETE", + "/_search/point_in_time", params=params, headers=headers, + body=body, + ) + + @query_params() + async def get_all_pits(self, params=None, headers=None): + """ + Lists all active point in time searches. 
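+        The response includes each PIT's ID, creation time, and keep-alive.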
+ """ + return await self.transport.perform_request( + "GET", "/_search/point_in_time/_all", params=params, headers=headers ) @query_params() diff --git a/opensearchpy/_async/client/__init__.pyi b/opensearchpy/_async/client/__init__.pyi index 27a47ed9..a016d791 100644 --- a/opensearchpy/_async/client/__init__.pyi +++ b/opensearchpy/_async/client/__init__.pyi @@ -1057,7 +1057,29 @@ class AsyncOpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - async def list_all_point_in_time( + async def create_pit( + self, + index: Any, + *, + allow_partial_pit_creation: Optional[Any] = ..., + expand_wildcards: Optional[Any] = ..., + keep_alive: Optional[Any] = ..., + preference: Optional[Any] = ..., + routing: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def delete_all_pits( self, *, pretty: Optional[bool] = ..., @@ -1073,11 +1095,10 @@ class AsyncOpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - async def delete_point_in_time( + async def delete_pit( self, *, body: Optional[Any] = ..., - all: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1091,15 +1112,9 @@ class AsyncOpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - async def create_point_in_time( + async def get_all_pits( self, *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., diff --git a/opensearchpy/_async/client/_patch.py b/opensearchpy/_async/client/_patch.py new file mode 100644 index 00000000..b1b00942 --- /dev/null +++ b/opensearchpy/_async/client/_patch.py @@ -0,0 +1,135 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import warnings + +from .utils import SKIP_IN_PATH, query_params + + +@query_params() +async def list_all_point_in_time(self, params=None, headers=None): + """ + Returns the list of active point in times searches + + .. warning:: + + This API will be removed in a future version + Use 'get_all_pits' API instead. + + """ + warnings.warn( + "The 'list_all_point_in_time' API is deprecated and will be removed in a future version. 
Use 'get_all_pits' API instead.", + DeprecationWarning, + ) + + return await self.get_all_pits(params=params, headers=headers) + + +@query_params( + "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" +) +async def create_point_in_time(self, index, params=None, headers=None): + """ + Create a point in time that can be used in subsequent searches + + + :arg index: A comma-separated list of index names to open point + in time; use `_all` or empty string to perform the operation on all + indices + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices: open, + closed, hidden, none, all Default: open + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed) + :arg keep_alive: Specific the time to live for the point in time + :arg preference: Specify the node or shard the operation should + be performed on (default: random) + :arg routing: Specific routing value + + .. warning:: + + This API will be removed in a future version + Use 'create_pit' API instead. + + """ + warnings.warn( + "The 'create_point_in_time' API is deprecated and will be removed in a future version. Use 'create_pit' API instead.", + DeprecationWarning, + ) + + return await self.create_pit(index=index, params=params, headers=headers) + + +@query_params() +async def delete_point_in_time(self, body=None, all=False, params=None, headers=None): + """ + Delete a point in time + + + :arg body: a point-in-time id to delete + :arg all: set it to `True` to delete all alive point in time. + + .. warning:: + + This API will be removed in a future version + Use 'delete_all_pits' or 'delete_pit' API instead. + + """ + warnings.warn( + "The 'delete_point_in_time' API is deprecated and will be removed in a future version. Use 'delete_all_pits' or 'delete_pit' API instead.", + DeprecationWarning, + ) + + if all: + return await self.delete_all_pits(params=params, headers=headers) + else: + return await self.delete_pit(body=body, params=params, headers=headers) + + +@query_params() +async def health_check(self, params=None, headers=None): + """ + Checks to see if the Security plugin is up and running. + + .. warning:: + + This API will be removed in a future version + Use 'health' API instead. + + """ + warnings.warn( + "The 'health_check' API in security client is deprecated and will be removed in a future version. Use 'health' API instead.", + DeprecationWarning, + ) + + return await self.health(params=params, headers=headers) + + +@query_params() +async def update_audit_config(self, body, params=None, headers=None): + """ + A PUT call updates the audit configuration. + + .. warning:: + + This API will be removed in a future version + Use 'update_audit_configuration' API instead. + + """ + warnings.warn( + "The 'update_audit_config' API in security client is deprecated and will be removed in a future version. 
Use 'update_audit_configuration' API instead.", + DeprecationWarning, + ) + + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.update_audit_configuration( + params=params, headers=headers, body=body + ) diff --git a/opensearchpy/_async/client/_patch.pyi b/opensearchpy/_async/client/_patch.pyi new file mode 100644 index 00000000..1912c180 --- /dev/null +++ b/opensearchpy/_async/client/_patch.pyi @@ -0,0 +1,70 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union + +async def list_all_point_in_time( + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., +) -> Any: ... +async def create_point_in_time( + *, + index: Optional[Any] = ..., + expand_wildcards: Optional[Any] = ..., + ignore_unavailable: Optional[Any] = ..., + keep_alive: Optional[Any] = ..., + preference: Optional[Any] = ..., + routing: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., +) -> Any: ... +async def delete_point_in_time( + *, + body: Optional[Any] = ..., + all: Optional[bool] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., +) -> Any: ... +async def health_check( + params: Union[Any, None] = ..., headers: Union[Any, None] = ... +) -> Union[bool, Any]: ... +async def update_audit_config( + body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... +) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index 65021765..bc8e8671 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -7,10 +7,12 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
-from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params +from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): + from ._patch import health_check, update_audit_config + @query_params() async def get_account_details(self, params=None, headers=None): """ @@ -648,15 +650,13 @@ async def flush_cache(self, params=None, headers=None): ) @query_params() - async def health_check(self, params=None, headers=None): + async def health(self, params=None, headers=None): """ Checks to see if the Security plugin is up and running. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "health"), - params=params, - headers=headers, + "GET", "/_plugins/_security/health", params=params, headers=headers ) @query_params() @@ -672,16 +672,17 @@ async def get_audit_configuration(self, params=None, headers=None): ) @query_params() - async def update_audit_config(self, body, params=None, headers=None): + async def update_audit_configuration(self, body, params=None, headers=None): """ - A PUT call updates the audit configuration. + Updates the audit configuration. + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "audit", "config"), + "/_plugins/_security/api/audit/config", params=params, headers=headers, body=body, diff --git a/opensearchpy/_async/client/security.pyi b/opensearchpy/_async/client/security.pyi index 77239296..7840445a 100644 --- a/opensearchpy/_async/client/security.pyi +++ b/opensearchpy/_async/client/security.pyi @@ -6,9 +6,9 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from typing import Any, Union +from typing import Any, Collection, MutableMapping, Optional, Tuple, Union -from ..client.utils import NamespacedClient as NamespacedClient +from .utils import NamespacedClient as NamespacedClient class SecurityClient(NamespacedClient): async def get_account_details( @@ -192,14 +192,41 @@ class SecurityClient(NamespacedClient): async def flush_cache( self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... ) -> Any: ... - async def health_check( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + async def health( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_audit_configuration( self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... ) -> Any: ... - async def update_audit_config( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... 
+ async def update_audit_configuration( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_audit_configuration( self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 1fe0c959..8f976879 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -173,6 +173,12 @@ def default(self, obj): """ + from ._patch import ( + create_point_in_time, + delete_point_in_time, + list_all_point_in_time, + ) + def __init__(self, hosts=None, transport_class=Transport, **kwargs): """ :arg hosts: list of nodes, or a single node, we should connect to. @@ -1955,63 +1961,73 @@ def get_script_languages(self, params=None, headers=None): "GET", "/_script_language", params=params, headers=headers ) - @query_params() - def list_all_point_in_time(self, params=None, headers=None): + @query_params( + "allow_partial_pit_creation", + "expand_wildcards", + "keep_alive", + "preference", + "routing", + ) + def create_pit(self, index, params=None, headers=None): """ - Returns the list of active point in times searches + Creates point in time context. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_partial_pit_creation: Allow if point in time can be + created with partial failures. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg keep_alive: Specify the keep alive for point in time. + :arg preference: Specify the node or shard the operation should + be performed on. + :arg routing: Comma-separated list of specific routing values. """ + if index in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'index'.") + return self.transport.perform_request( - "GET", - _make_path("_search", "point_in_time", "_all"), + "POST", + _make_path(index, "_search", "point_in_time"), params=params, headers=headers, ) @query_params() - def delete_point_in_time(self, body=None, all=False, params=None, headers=None): + def delete_all_pits(self, params=None, headers=None): """ - Delete a point in time - + Deletes all active point in time searches. - :arg body: a point-in-time id to delete - :arg all: set it to `True` to delete all alive point in time. 
""" - - path = ( - _make_path("_search", "point_in_time", "_all") - if all - else _make_path("_search", "point_in_time") - ) return self.transport.perform_request( - "DELETE", path, params=params, headers=headers, body=body + "DELETE", "/_search/point_in_time/_all", params=params, headers=headers ) - @query_params( - "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" - ) - def create_point_in_time(self, index=None, params=None, headers=None): + @query_params() + def delete_pit(self, body=None, params=None, headers=None): """ - Create a point in time that can be used in subsequent searches + Deletes one or more point in time searches based on the IDs passed. - :arg index: A comma-separated list of index names to open point - in time; use `_all` or empty string to perform the operation on all - indices - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg keep_alive: Specific the time to live for the point in time - :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + :arg body: a point-in-time id to delete """ return self.transport.perform_request( - "POST", - _make_path(index, "_search", "point_in_time"), + "DELETE", + "/_search/point_in_time", params=params, headers=headers, + body=body, + ) + + @query_params() + def get_all_pits(self, params=None, headers=None): + """ + Lists all active point in time searches. + """ + return self.transport.perform_request( + "GET", "/_search/point_in_time/_all", params=params, headers=headers ) @query_params() diff --git a/opensearchpy/client/__init__.pyi b/opensearchpy/client/__init__.pyi index 64f21ca7..e1d1e359 100644 --- a/opensearchpy/client/__init__.pyi +++ b/opensearchpy/client/__init__.pyi @@ -1054,7 +1054,29 @@ class OpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def list_all_point_in_time( + def create_pit( + self, + index: Any, + *, + allow_partial_pit_creation: Optional[Any] = ..., + expand_wildcards: Optional[Any] = ..., + keep_alive: Optional[Any] = ..., + preference: Optional[Any] = ..., + routing: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def delete_all_pits( self, *, pretty: Optional[bool] = ..., @@ -1070,11 +1092,10 @@ class OpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
- def delete_point_in_time( + def delete_pit( self, *, body: Optional[Any] = ..., - all: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1088,15 +1109,9 @@ class OpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def create_point_in_time( + def get_all_pits( self, *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., diff --git a/opensearchpy/client/_patch.py b/opensearchpy/client/_patch.py new file mode 100644 index 00000000..d92eae5a --- /dev/null +++ b/opensearchpy/client/_patch.py @@ -0,0 +1,133 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import warnings + +from .utils import SKIP_IN_PATH, query_params + + +@query_params() +def list_all_point_in_time(self, params=None, headers=None): + """ + Returns the list of active point in times searches + + .. warning:: + + This API will be removed in a future version + Use 'get_all_pits' API instead. + + """ + warnings.warn( + "The 'list_all_point_in_time' API is deprecated and will be removed in a future version. Use 'get_all_pits' API instead.", + DeprecationWarning, + ) + + return self.get_all_pits(params=params, headers=headers) + + +@query_params( + "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" +) +def create_point_in_time(self, index, params=None, headers=None): + """ + Create a point in time that can be used in subsequent searches + + + :arg index: A comma-separated list of index names to open point + in time; use `_all` or empty string to perform the operation on all + indices + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices: open, + closed, hidden, none, all Default: open + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed) + :arg keep_alive: Specific the time to live for the point in time + :arg preference: Specify the node or shard the operation should + be performed on (default: random) + :arg routing: Specific routing value + + .. warning:: + + This API will be removed in a future version + Use 'create_pit' API instead. + + """ + warnings.warn( + "The 'create_point_in_time' API is deprecated and will be removed in a future version. Use 'create_pit' API instead.", + DeprecationWarning, + ) + + return self.create_pit(index=index, params=params, headers=headers) + + +@query_params() +def delete_point_in_time(self, body=None, all=False, params=None, headers=None): + """ + Delete a point in time + + + :arg body: a point-in-time id to delete + :arg all: set it to `True` to delete all alive point in time. + + .. warning:: + + This API will be removed in a future version + Use 'delete_all_pits' or 'delete_pit' API instead. + + """ + warnings.warn( + "The 'delete_point_in_time' API is deprecated and will be removed in a future version. 
Use 'delete_all_pits' or 'delete_pit' API instead.", + DeprecationWarning, + ) + + if all: + return self.delete_all_pits(params=params, headers=headers) + else: + return self.delete_pit(body=body, params=params, headers=headers) + + +@query_params() +def health_check(self, params=None, headers=None): + """ + Checks to see if the Security plugin is up and running. + + .. warning:: + + This API will be removed in a future version + Use 'health' API instead. + + """ + warnings.warn( + "The 'health_check' API in security client is deprecated and will be removed in a future version. Use 'health' API instead.", + DeprecationWarning, + ) + + return self.health(params=params, headers=headers) + + +@query_params() +def update_audit_config(self, body, params=None, headers=None): + """ + A PUT call updates the audit configuration. + + .. warning:: + + This API will be removed in a future version + Use 'update_audit_configuration' API instead. + + """ + warnings.warn( + "The 'update_audit_config' API in security client is deprecated and will be removed in a future version. Use 'update_audit_configuration' API instead.", + DeprecationWarning, + ) + + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return self.update_audit_configuration(params=params, headers=headers, body=body) diff --git a/opensearchpy/client/_patch.pyi b/opensearchpy/client/_patch.pyi new file mode 100644 index 00000000..be6e12a0 --- /dev/null +++ b/opensearchpy/client/_patch.pyi @@ -0,0 +1,69 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union + +def list_all_point_in_time( + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., +) -> Any: ... +def create_point_in_time( + *, + index: Optional[Any] = ..., + expand_wildcards: Optional[Any] = ..., + ignore_unavailable: Optional[Any] = ..., + keep_alive: Optional[Any] = ..., + preference: Optional[Any] = ..., + routing: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., +) -> Any: ... 
+def delete_point_in_time( + *, + body: Optional[Any] = ..., + all: Optional[bool] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., +) -> Any: ... +def health_check( + params: Union[Any, None] = ..., headers: Union[Any, None] = ... +) -> Union[bool, Any]: ... +def update_audit_config( + body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... +) -> Union[bool, Any]: ... diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 288f6676..32a362b3 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -7,10 +7,12 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params +from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): + from ._patch import health_check, update_audit_config + @query_params() def get_account_details(self, params=None, headers=None): """ @@ -644,15 +646,13 @@ def flush_cache(self, params=None, headers=None): ) @query_params() - def health_check(self, params=None, headers=None): + def health(self, params=None, headers=None): """ Checks to see if the Security plugin is up and running. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "health"), - params=params, - headers=headers, + "GET", "/_plugins/_security/health", params=params, headers=headers ) @query_params() @@ -668,16 +668,17 @@ def get_audit_configuration(self, params=None, headers=None): ) @query_params() - def update_audit_config(self, body, params=None, headers=None): + def update_audit_configuration(self, body, params=None, headers=None): """ - A PUT call updates the audit configuration. + Updates the audit configuration. + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "audit", "config"), + "/_plugins/_security/api/audit/config", params=params, headers=headers, body=body, diff --git a/opensearchpy/client/security.pyi b/opensearchpy/client/security.pyi index de50b8b2..c729d5d3 100644 --- a/opensearchpy/client/security.pyi +++ b/opensearchpy/client/security.pyi @@ -6,9 +6,9 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from typing import Any, Union +from typing import Any, Collection, MutableMapping, Optional, Tuple, Union -from ..client.utils import NamespacedClient as NamespacedClient +from .utils import NamespacedClient as NamespacedClient class SecurityClient(NamespacedClient): def get_account_details( @@ -192,15 +192,42 @@ class SecurityClient(NamespacedClient): def flush_cache( self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... ) -> Union[bool, Any]: ... - def health_check( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... 
+ def health( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def get_audit_configuration( self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... ) -> Union[bool, Any]: ... - def update_audit_config( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... + def update_audit_configuration( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def patch_audit_configuration( self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... ) -> Union[bool, Any]: ... diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 39189c21..9b1f7a5f 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -174,3 +174,55 @@ async def test_delete_user(self): # Try fetching the user with self.assertRaises(NotFoundError): response = await self.client.security.get_user(self.USER_NAME) + + async def test_health_check(self): + response = await self.client.security.health_check() + self.assertNotIn("errors", response) + self.assertEqual("UP", response.get("status")) + + async def test_health(self): + response = await self.client.security.health() + self.assertNotIn("errors", response) + self.assertEqual("UP", response.get("status")) + + AUDIT_CONFIG_SETTINGS = { + "enabled": True, + "audit": { + "ignore_users": [], + "ignore_requests": [], + "disabled_rest_categories": ["AUTHENTICATED", "GRANTED_PRIVILEGES"], + "disabled_transport_categories": ["AUTHENTICATED", "GRANTED_PRIVILEGES"], + "log_request_body": False, + "resolve_indices": False, + "resolve_bulk_requests": False, + "exclude_sensitive_headers": True, + "enable_transport": False, + "enable_rest": True, + }, + "compliance": { + "enabled": True, + "write_log_diffs": False, + "read_watched_fields": {}, + "read_ignore_users": [], + "write_watched_indices": [], + "write_ignore_users": [], + "read_metadata_only": True, + "write_metadata_only": True, + "external_config": False, + "internal_config": True, + }, + } + + async def test_update_audit_config(self): + response = await self.client.security.update_audit_config( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) + + async def test_update_audit_configuration(self): + 
response = await self.client.security.update_audit_configuration( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index 53742dbe..e8546484 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -27,3 +27,20 @@ def test_delete_all_point_in_time(self): def test_list_all_point_in_time(self): self.client.list_all_point_in_time() self.assert_url_called("GET", "/_search/point_in_time/_all") + + def test_create_pit(self): + index_name = "test-index" + self.client.create_pit(index=index_name) + self.assert_url_called("POST", "/test-index/_search/point_in_time") + + def test_delete_pit(self): + self.client.delete_pit(body={"pit_id": ["Sample-PIT-ID"]}) + self.assert_url_called("DELETE", "/_search/point_in_time") + + def test_delete_all_pits(self): + self.client.delete_all_pits() + self.assert_url_called("DELETE", "/_search/point_in_time/_all") + + def test_get_all_pits(self): + self.client.get_all_pits() + self.assert_url_called("GET", "/_search/point_in_time/_all") diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index 1f46712a..90283af8 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -164,3 +164,55 @@ def test_delete_user(self): # Try fetching the user with self.assertRaises(NotFoundError): response = self.client.security.get_user(self.USER_NAME) + + def test_health_check(self): + response = self.client.security.health_check() + self.assertNotIn("errors", response) + self.assertEqual("UP", response.get("status")) + + def test_health(self): + response = self.client.security.health() + self.assertNotIn("errors", response) + self.assertEqual("UP", response.get("status")) + + AUDIT_CONFIG_SETTINGS = { + "enabled": True, + "audit": { + "ignore_users": [], + "ignore_requests": [], + "disabled_rest_categories": ["AUTHENTICATED", "GRANTED_PRIVILEGES"], + "disabled_transport_categories": ["AUTHENTICATED", "GRANTED_PRIVILEGES"], + "log_request_body": False, + "resolve_indices": False, + "resolve_bulk_requests": False, + "exclude_sensitive_headers": True, + "enable_transport": False, + "enable_rest": True, + }, + "compliance": { + "enabled": True, + "write_log_diffs": False, + "read_watched_fields": {}, + "read_ignore_users": [], + "write_watched_indices": [], + "write_ignore_users": [], + "read_metadata_only": True, + "write_metadata_only": True, + "external_config": False, + "internal_config": True, + }, + } + + def test_update_audit_config(self): + response = self.client.security.update_audit_config( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) + + def test_update_audit_configuration(self): + response = self.client.security.update_audit_configuration( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) diff --git a/utils/generate-api.py b/utils/generate-api.py index 40aea2ba..f13a41b5 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -120,6 +120,13 @@ def parse_orig(self): for line in content.split("\n"): header_lines.append(line) if line.startswith("class"): + if ( + "security.py" in 
str(self.filepath) + and not self.filepath.suffix == ".pyi" + ): + header_lines.append( + " from ._patch import health_check, update_audit_config" + ) break self.header = "\n".join(header_lines) self.orders = re.findall( @@ -375,8 +382,12 @@ def method(self): # To adhere to the HTTP RFC we shouldn't send # bodies in GET requests. default_method = self.path["methods"][0] + if self.name == "refresh" or self.name == "flush": + return "POST" if self.body and default_method == "GET" and "POST" in self.path["methods"]: return "POST" + if "POST" and "PUT" in self.path["methods"] and self.name != "bulk": + return "PUT" return default_method @property @@ -437,8 +448,9 @@ def read_modules(): for path in data["paths"]: for x in data["paths"][path]: - data["paths"][path][x].update({"path": path, "method": x}) - list_of_dicts.append(data["paths"][path][x]) + if "deprecated" not in data["paths"][path][x]: + data["paths"][path][x].update({"path": path, "method": x}) + list_of_dicts.append(data["paths"][path][x]) # Update parameters in each endpoint for p in list_of_dicts: From 55c179032bafdb60af64dd2c2c1036d3ebd9c7ab Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Tue, 26 Sep 2023 09:49:25 -0700 Subject: [PATCH 11/80] Integrated generated APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility (#508) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/client/tasks.py | 64 +++++++++++++--------------- opensearchpy/_async/client/tasks.pyi | 9 ++++ opensearchpy/client/tasks.py | 64 +++++++++++++--------------- opensearchpy/client/tasks.pyi | 9 ++++ utils/generated_file_headers.txt | 10 ++--- 6 files changed, 84 insertions(+), 73 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 08306124..07aafc12 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Changed +- Integrated generated `tasks client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 2b49ddc0..212b9e56 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + import warnings from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -45,24 +55,20 @@ async def list(self, params=None, headers=None): Returns a list of tasks. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information (default: false) + :arg detailed: Return detailed task information. :arg group_by: Group tasks by nodes or parent/child - relationships Valid choices: nodes, parents, none Default: nodes - :arg nodes: A comma-separated list of node IDs or names to limit + relationships. Valid choices: nodes, parents, none + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. - :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. """ return await self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers @@ -74,23 +80,18 @@ async def cancel(self, task_id=None, params=None, headers=None): Cancels a task, if it can be cancelled through an API. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Cancel the task with specified task id - (node_id:task_number) - :arg actions: A comma-separated list of actions that should be + (node_id:task_number). + :arg actions: Comma-separated list of actions that should be cancelled. Leave empty to cancel all. - :arg nodes: A comma-separated list of node IDs or names to limit + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. - :arg wait_for_completion: Should the request block until the - cancellation of the task and its descendant tasks is completed. Defaults - to false + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. """ return await self.transport.perform_request( "POST", @@ -105,16 +106,11 @@ async def get(self, task_id=None, params=None, headers=None): Returns information about a task. - .. 
warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Return the task with specified id - (node_id:task_number) - :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + (node_id:task_number). + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. """ if task_id in SKIP_IN_PATH: warnings.warn( diff --git a/opensearchpy/_async/client/tasks.pyi b/opensearchpy/_async/client/tasks.pyi index ae777158..14081a2d 100644 --- a/opensearchpy/_async/client/tasks.pyi +++ b/opensearchpy/_async/client/tasks.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index fff32dd7..ff76a3a6 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + import warnings from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -45,24 +55,20 @@ def list(self, params=None, headers=None): Returns a list of tasks. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information (default: false) + :arg detailed: Return detailed task information. :arg group_by: Group tasks by nodes or parent/child - relationships Valid choices: nodes, parents, none Default: nodes - :arg nodes: A comma-separated list of node IDs or names to limit + relationships. Valid choices: nodes, parents, none + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. 
- :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. """ return self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers @@ -74,23 +80,18 @@ def cancel(self, task_id=None, params=None, headers=None): Cancels a task, if it can be cancelled through an API. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Cancel the task with specified task id - (node_id:task_number) - :arg actions: A comma-separated list of actions that should be + (node_id:task_number). + :arg actions: Comma-separated list of actions that should be cancelled. Leave empty to cancel all. - :arg nodes: A comma-separated list of node IDs or names to limit + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. - :arg wait_for_completion: Should the request block until the - cancellation of the task and its descendant tasks is completed. Defaults - to false + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. """ return self.transport.perform_request( "POST", @@ -105,16 +106,11 @@ def get(self, task_id=None, params=None, headers=None): Returns information about a task. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Return the task with specified id - (node_id:task_number) - :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + (node_id:task_number). + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. """ if task_id in SKIP_IN_PATH: warnings.warn( diff --git a/opensearchpy/client/tasks.pyi b/opensearchpy/client/tasks.pyi index 3577bae3..50ad69bb 100644 --- a/opensearchpy/client/tasks.pyi +++ b/opensearchpy/client/tasks.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient diff --git a/utils/generated_file_headers.txt b/utils/generated_file_headers.txt index 16c8aba5..135828ce 100644 --- a/utils/generated_file_headers.txt +++ b/utils/generated_file_headers.txt @@ -1,8 +1,8 @@ # ---------------------------------------------------- -# THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST. 
-# -# To contribute, please make necessary modifications to either "Python generator": +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": # https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or "OpenAPI specs": +# or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- \ No newline at end of file +# ----------------------------------------------------- From a29be4ac30233c7439580fde20d6adf80e6a3bb0 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Thu, 28 Sep 2023 20:18:53 -0700 Subject: [PATCH 12/80] Integrated generated ingest client APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility (#513) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/client/ingest.py | 71 ++++++++++++++------------- opensearchpy/_async/client/ingest.pyi | 32 +++++------- opensearchpy/client/ingest.py | 71 ++++++++++++++------------- opensearchpy/client/ingest.pyi | 32 +++++------- utils/generate-api.py | 17 ++++--- utils/templates/base | 2 +- 7 files changed, 109 insertions(+), 117 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 07aafc12..7a604e99 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Changed - Integrated generated `tasks client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) +- Integrated generated `ingest client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index dfc8eb1c..cb5253eb 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -25,42 +25,52 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "summary") + @query_params("cluster_manager_timeout", "master_timeout") async def get_pipeline(self, id=None, params=None, headers=None): """ Returns a pipeline. - :arg id: Comma separated list of pipeline ids. Wildcards - supported - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg summary: Return pipelines without their definitions - (default: false) + :arg id: Comma-separated list of pipeline ids. Wildcards + supported. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ return await self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def put_pipeline(self, id, body, params=None, headers=None): """ Creates or updates a pipeline. - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg body: The ingest definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ for param in (id, body): if param in SKIP_IN_PATH: @@ -74,18 +84,19 @@ async def put_pipeline(self, id, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def delete_pipeline(self, id, params=None, headers=None): """ Deletes a pipeline. - :arg id: Pipeline ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg id: Pipeline ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. 
""" if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -104,9 +115,9 @@ async def simulate(self, body, id=None, params=None, headers=None): :arg body: The simulate definition - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg verbose: Verbose mode. Display data output for each - processor in executed pipeline + processor in executed pipeline. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -128,13 +139,3 @@ async def processor_grok(self, params=None, headers=None): return await self.transport.perform_request( "GET", "/_ingest/processor/grok", params=params, headers=headers ) - - @query_params() - async def geo_ip_stats(self, params=None, headers=None): - """ - Returns statistical information about geoip databases - - """ - return await self.transport.perform_request( - "GET", "/_ingest/geoip/stats", params=params, headers=headers - ) diff --git a/opensearchpy/_async/client/ingest.pyi b/opensearchpy/_async/client/ingest.pyi index 7e498b6c..40d3c7d9 100644 --- a/opensearchpy/_async/client/ingest.pyi +++ b/opensearchpy/_async/client/ingest.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -33,9 +42,8 @@ class IngestClient(NamespacedClient): self, *, id: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., - summary: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -54,8 +62,8 @@ class IngestClient(NamespacedClient): id: Any, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -74,8 +82,8 @@ class IngestClient(NamespacedClient): self, id: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -125,19 +133,3 @@ class IngestClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
- async def geo_ip_stats( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index e40f1a3d..fb9d4f79 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -25,42 +25,52 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "summary") + @query_params("cluster_manager_timeout", "master_timeout") def get_pipeline(self, id=None, params=None, headers=None): """ Returns a pipeline. - :arg id: Comma separated list of pipeline ids. Wildcards - supported - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg summary: Return pipelines without their definitions - (default: false) + :arg id: Comma-separated list of pipeline ids. Wildcards + supported. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ return self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def put_pipeline(self, id, body, params=None, headers=None): """ Creates or updates a pipeline. - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg body: The ingest definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. 
""" for param in (id, body): if param in SKIP_IN_PATH: @@ -74,18 +84,19 @@ def put_pipeline(self, id, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def delete_pipeline(self, id, params=None, headers=None): """ Deletes a pipeline. - :arg id: Pipeline ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg id: Pipeline ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -104,9 +115,9 @@ def simulate(self, body, id=None, params=None, headers=None): :arg body: The simulate definition - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg verbose: Verbose mode. Display data output for each - processor in executed pipeline + processor in executed pipeline. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -128,13 +139,3 @@ def processor_grok(self, params=None, headers=None): return self.transport.perform_request( "GET", "/_ingest/processor/grok", params=params, headers=headers ) - - @query_params() - def geo_ip_stats(self, params=None, headers=None): - """ - Returns statistical information about geoip databases - - """ - return self.transport.perform_request( - "GET", "/_ingest/geoip/stats", params=params, headers=headers - ) diff --git a/opensearchpy/client/ingest.pyi b/opensearchpy/client/ingest.pyi index bbc5aba2..251071e3 100644 --- a/opensearchpy/client/ingest.pyi +++ b/opensearchpy/client/ingest.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -33,9 +42,8 @@ class IngestClient(NamespacedClient): self, *, id: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., - summary: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -54,8 +62,8 @@ class IngestClient(NamespacedClient): id: Any, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -74,8 +82,8 @@ class IngestClient(NamespacedClient): self, id: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -125,19 +133,3 @@ class IngestClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def geo_ip_stats( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
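The practical effect of these regenerated clients and the `_patch.py` shims is easiest to see from the caller's side. The following is a minimal sketch, not code from the patches themselves: the index and pipeline names are hypothetical, and it assumes a local cluster reachable with the default `admin` credentials.

```python
import warnings

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)
client.indices.create(index="movies", ignore=400)  # hypothetical index; ignore "already exists"

# The deprecated point-in-time shim delegates to the new API and emits a
# DeprecationWarning that names its replacement.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    pit = client.create_point_in_time(index="movies", keep_alive="1m")  # deprecated
assert any(issubclass(w.category, DeprecationWarning) for w in caught)
client.delete_pit(body={"pit_id": [pit["pit_id"]]})  # preferred replacement

# Regenerated ingest client: `cluster_manager_timeout` is the documented
# parameter, while the deprecated `master_timeout` is still accepted, so
# existing callers keep working.
pipeline_body = {
    "description": "Trim whitespace from the title field",  # hypothetical pipeline
    "processors": [{"trim": {"field": "title"}}],
}
client.ingest.put_pipeline(
    id="trim-title", body=pipeline_body, cluster_manager_timeout="30s"
)
print(client.ingest.get_pipeline(id="trim-title", master_timeout="30s"))
client.ingest.delete_pipeline(id="trim-title")
```

Either timeout spelling reaches the same endpoint; the `utils/templates/base` change below is what renders the `(Deprecated: ...)` note for `master_timeout` into the generated docstrings.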
diff --git a/utils/generate-api.py b/utils/generate-api.py index f13a41b5..adab04bf 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -448,9 +448,11 @@ def read_modules(): for path in data["paths"]: for x in data["paths"][path]: - if "deprecated" not in data["paths"][path][x]: - data["paths"][path][x].update({"path": path, "method": x}) - list_of_dicts.append(data["paths"][path][x]) + if data["paths"][path][x]["x-operation-group"] == "nodes.hot_threads": + if "deprecated" in data["paths"][path][x]: + continue + data["paths"][path][x].update({"path": path, "method": x}) + list_of_dicts.append(data["paths"][path][x]) # Update parameters in each endpoint for p in list_of_dicts: @@ -484,12 +486,15 @@ def read_modules(): A.update({"type": "enum"}) A.update({"options": m["schema"]["enum"]}) - if "deprecated" in m: - A.update({"deprecated": m["deprecated"]}) + if "deprecated" in m["schema"]: + A.update({"deprecated": m["schema"]["deprecated"]}) + A.update( + {"deprecation_message": m["schema"]["x-deprecation-message"]} + ) params_new.update({m["name"]: A}) # Removing the deprecated "type" - if "type" in params_new: + if p["x-operation-group"] != "nodes.hot_threads" and "type" in params_new: params_new.pop("type") if bool(params_new): diff --git a/utils/templates/base b/utils/templates/base index 4a1249ed..971efbc9 100644 --- a/utils/templates/base +++ b/utils/templates/base @@ -21,7 +21,7 @@ {% for p, info in api.params %} {% filter wordwrap(72, wrapstring="\n ") %} - :arg {{ p }}: {{ info.description }}{% if info.options %} Valid choices: {{ info.options|join(", ") }}{% endif %}{% if info.default %} Default: {{ info.default }}{% endif %} + :arg {{ p }}{% if info.deprecated %} (Deprecated: {{ info['deprecation_message'][:-1] }}){% endif %}: {{ info.description }}{% if info.options %} Valid choices: {{ info.options|join(", ") }}{% endif %}{% if info.default %} Default: {{ info.default }}{% endif %} {% endfilter %} {% endfor %} From 335111bef4f3d6e674fe8fe88f01196e06e651ab Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Tue, 3 Oct 2023 07:40:51 -0700 Subject: [PATCH 13/80] Integrated generated dangling_indices client APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility (#511) Signed-off-by: saimedhi Signed-off-by: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Signed-off-by: roma2023 --- CHANGELOG.md | 1 + .../_async/client/dangling_indices.py | 44 +++++++++++++------ .../_async/client/dangling_indices.pyi | 13 +++++- opensearchpy/client/dangling_indices.py | 44 +++++++++++++------ opensearchpy/client/dangling_indices.pyi | 13 +++++- 5 files changed, 83 insertions(+), 32 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a604e99..bdd4f795 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Changed - Integrated generated `tasks client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Integrated generated `ingest client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) +- Integrated generated `dangling_indices client` APIs into the existing module, ensuring alignment with the server and maintaining 
backward compatibility ([#511](https://github.com/opensearch-project/opensearch-py/pull/511)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index ff9e533f..cf382c52 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -25,24 +25,37 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class DanglingIndicesClient(NamespacedClient): @query_params( - "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout" + "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) async def delete_dangling_index(self, index_uuid, params=None, headers=None): """ - Deletes the specified dangling index + Deletes the specified dangling index. - :arg index_uuid: The UUID of the dangling index + :arg index_uuid: The UUID of the dangling index. :arg accept_data_loss: Must be set to true in order to delete - the dangling index - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + the dangling index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") @@ -55,19 +68,22 @@ async def delete_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params( - "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout" + "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) async def import_dangling_index(self, index_uuid, params=None, headers=None): """ - Imports the specified dangling index + Imports the specified dangling index. - :arg index_uuid: The UUID of the dangling index + :arg index_uuid: The UUID of the dangling index. :arg accept_data_loss: Must be set to true in order to import - the dangling index - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + the dangling index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") diff --git a/opensearchpy/_async/client/dangling_indices.pyi b/opensearchpy/_async/client/dangling_indices.pyi index c9bb9ec1..17ab1ac8 100644 --- a/opensearchpy/_async/client/dangling_indices.pyi +++ b/opensearchpy/_async/client/dangling_indices.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -34,8 +43,8 @@ class DanglingIndicesClient(NamespacedClient): index_uuid: Any, *, accept_data_loss: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -55,8 +64,8 @@ class DanglingIndicesClient(NamespacedClient): index_uuid: Any, *, accept_data_loss: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index 4d1b5a36..b04698ad 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -25,24 +25,37 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class DanglingIndicesClient(NamespacedClient): @query_params( - "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout" + "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) def delete_dangling_index(self, index_uuid, params=None, headers=None): """ - Deletes the specified dangling index + Deletes the specified dangling index. - :arg index_uuid: The UUID of the dangling index + :arg index_uuid: The UUID of the dangling index. 
:arg accept_data_loss: Must be set to true in order to delete - the dangling index - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + the dangling index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") @@ -55,19 +68,22 @@ def delete_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params( - "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout" + "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) def import_dangling_index(self, index_uuid, params=None, headers=None): """ - Imports the specified dangling index + Imports the specified dangling index. - :arg index_uuid: The UUID of the dangling index + :arg index_uuid: The UUID of the dangling index. :arg accept_data_loss: Must be set to true in order to import - the dangling index - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + the dangling index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") diff --git a/opensearchpy/client/dangling_indices.pyi b/opensearchpy/client/dangling_indices.pyi index 56e4a72f..203805a1 100644 --- a/opensearchpy/client/dangling_indices.pyi +++ b/opensearchpy/client/dangling_indices.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -34,8 +43,8 @@ class DanglingIndicesClient(NamespacedClient): index_uuid: Any, *, accept_data_loss: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -55,8 +64,8 @@ class DanglingIndicesClient(NamespacedClient): index_uuid: Any, *, accept_data_loss: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., From 124196859bfacfa8a05c21ba8856d60f0a71fbff Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Tue, 3 Oct 2023 07:41:41 -0700 Subject: [PATCH 14/80] Bumps from >=1.21.1 to >=1.26.9 (#518) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + setup.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bdd4f795..85a4965b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Dependencies - Bumps `sphinx` from <7.1 to <7.3 - Bumps `urllib3` from >=1.21.1, <2 to >=1.21.1 ([#466](https://github.com/opensearch-project/opensearch-py/pull/466)) +- Bumps `urllib3` from >=1.21.1 to >=1.26.9 ([#518](https://github.com/opensearch-project/opensearch-py/pull/518)) ## [2.3.1] ### Added diff --git a/setup.py b/setup.py index 3bce64d1..c21e053f 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,7 @@ if package == module_dir or package.startswith(module_dir + ".") ] install_requires = [ - "urllib3>=1.21.1", + "urllib3>=1.26.9", "requests>=2.4.0, <3.0.0", "six", "python-dateutil", From aa4b6d6b4e318867566d79747107300bbd4359c6 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Tue, 3 Oct 2023 07:43:05 -0700 Subject: [PATCH 15/80] Modified generator to generate 'options' and 'default value' for parameters in description (#519) Signed-off-by: saimedhi Signed-off-by: roma2023 --- utils/generate-api.py | 7 +++++++ utils/templates/base | 3 ++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/utils/generate-api.py b/utils/generate-api.py index adab04bf..cfe12af4 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -482,6 +482,10 @@ def read_modules(): for m in params: A = dict(type=m["schema"]["type"], description=m["description"]) + + if "default" in m["schema"]: + A.update({"default": m["schema"]["default"]}) + if "enum" in m["schema"]: A.update({"type": "enum"}) A.update({"options": m["schema"]["enum"]}) @@ -508,6 +512,9 @@ def read_modules(): if "description" in n: B.update({"description": n["description"]}) + if "x-enum-options" in n["schema"]: + B.update({"options": n["schema"]["x-enum-options"]}) + deprecated_new = {} if "deprecated" in n: 
B.update({"deprecated": n["deprecated"]}) diff --git a/utils/templates/base b/utils/templates/base index 971efbc9..9b58b6c2 100644 --- a/utils/templates/base +++ b/utils/templates/base @@ -21,7 +21,8 @@ {% for p, info in api.params %} {% filter wordwrap(72, wrapstring="\n ") %} - :arg {{ p }}{% if info.deprecated %} (Deprecated: {{ info['deprecation_message'][:-1] }}){% endif %}: {{ info.description }}{% if info.options %} Valid choices: {{ info.options|join(", ") }}{% endif %}{% if info.default %} Default: {{ info.default }}{% endif %} + :arg {{ p }}{% if info.deprecated %} (Deprecated: {{ info['deprecation_message'][:-1] }}){% endif %}: {{ info.description }}{% if info.options %} Valid choices: {{ info.options|join(", ") }}{% endif %} + {% if info.default is defined %}{% if info.default is not none %}{% if info.default is sameas(false) %} (default: false){% else %} (default: {{ info.default }}){% endif %}{% endif %}{% endif %} {% endfilter %} {% endfor %} From 626659bef75e20c35e1e34423681eefeebd69cb2 Mon Sep 17 00:00:00 2001 From: Bhavani Ravi Date: Tue, 3 Oct 2023 20:15:35 +0530 Subject: [PATCH 16/80] fixes: #512 Wrong return type hint in async_scan (#520) * fixes: #512 Wrong return type hint in async_scan Signed-off-by: Bhavani Ravi * add: changelog Signed-off-by: Bhavani Ravi --------- Signed-off-by: Bhavani Ravi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/helpers/actions.pyi | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 85a4965b..92691507 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -126,6 +126,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Removed patch versions in integration tests for OpenSearch 1.0.0 - 2.3.0 to reduce Github Action jobs ([#262](https://github.com/opensearch-project/opensearch-py/pull/262)) ### Fixed - Fixed DeprecationWarning emitted from urllib3 1.26.13+ ([#246](https://github.com/opensearch-project/opensearch-py/pull/246)) +- Fixed Wrong return type hint in `async_scan` ([520](https://github.com/opensearch-project/opensearch-py/pull/520)) ### Security [Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...HEAD diff --git a/opensearchpy/_async/helpers/actions.pyi b/opensearchpy/_async/helpers/actions.pyi index be000ae8..cd6b6974 100644 --- a/opensearchpy/_async/helpers/actions.pyi +++ b/opensearchpy/_async/helpers/actions.pyi @@ -100,7 +100,7 @@ def async_scan( clear_scroll: bool = ..., scroll_kwargs: Optional[Mapping[str, Any]] = ..., **kwargs: Any -) -> AsyncGenerator[int, None]: ... +) -> AsyncGenerator[dict[str, Any], None]: ... async def async_reindex( client: AsyncOpenSearch, source_index: Union[str, Collection[str]], From c261f88a5177d7e474deac6e23f276d7ba3a6617 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Tue, 3 Oct 2023 15:00:49 -0700 Subject: [PATCH 17/80] Integrated generated 'nodes' client APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility (#514) Signed-off-by: saimedhi Signed-off-by: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Signed-off-by: Daniel (dB.) Doubrovkine Co-authored-by: Daniel (dB.) 
Doubrovkine Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/client/nodes.py | 118 ++++++++++++++------------- opensearchpy/_async/client/nodes.pyi | 10 ++- opensearchpy/client/nodes.py | 118 ++++++++++++++------------- opensearchpy/client/nodes.pyi | 10 ++- 5 files changed, 143 insertions(+), 114 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 92691507..0664e665 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Integrated generated `tasks client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Integrated generated `ingest client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) - Integrated generated `dangling_indices client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#511](https://github.com/opensearch-project/opensearch-py/pull/511)) +- Integrated generated `nodes client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index d437fd17..e0e8b06b 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import NamespacedClient, _make_path, query_params @@ -37,12 +47,12 @@ async def reload_secure_settings( Reloads secure settings. - :arg body: An object containing the password for the - opensearch keystore - :arg node_id: A comma-separated list of node IDs to span the + :arg body: An object containing the password for the opensearch + keystore + :arg node_id: Comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes. - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "POST", @@ -58,16 +68,16 @@ async def info(self, node_id=None, metric=None, params=None, headers=None): Returns information about nodes in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg metric: A comma-separated list of metrics you wish - returned. Leave empty to return all. 
Valid choices: settings, os, - process, jvm, thread_pool, transport, http, plugins, ingest - :arg flat_settings: Return settings in flat format (default: + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg metric: Comma-separated list of metrics you wish returned. + Leave empty to return all. Valid choices: settings, os, process, jvm, + thread_pool, transport, http, plugins, ingest + :arg flat_settings: Return settings in flat format. (default: false) - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, metric), params=params, headers=headers @@ -79,7 +89,6 @@ async def info(self, node_id=None, metric=None, params=None, headers=None): "fields", "groups", "include_segment_file_sizes", - "include_unloaded_segments", "level", "timeout", "types", @@ -91,37 +100,34 @@ async def stats( Returns statistical information about nodes in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, breaker, fs, http, indices, jvm, os, + metrics. Valid choices: _all, breaker, fs, http, indices, jvm, os, process, thread_pool, transport, discovery, indexing_pressure :arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) - metric isn't specified. Valid choices: _all, completion, docs, - fielddata, query_cache, flush, get, indexing, merge, request_cache, - refresh, search, segments, store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) - :arg groups: A comma-separated list of search groups for - `search` index metric + metric isn't specified. Valid choices: _all, store, indexing, get, + search, merge, flush, refresh, query_cache, fielddata, docs, warmer, + completion, segments, translog, suggest, request_cache, recovery + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). + :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). + :arg groups: Comma-separated list of search groups for `search` + index metric. 
:arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory + applies if segment stats are requested). (default: false) :arg level: Return indices stats aggregated at index, node or - shard level Valid choices: indices, node, shards Default: node - :arg timeout: Explicit operation timeout - :arg types: A comma-separated list of document types for the - `indexing` index metric + shard level. Valid choices: indices, node, shards + :arg timeout: Operation timeout. + :arg types: Comma-separated list of document types for the + `indexing` index metric. """ return await self.transport.perform_request( "GET", @@ -138,21 +144,21 @@ async def hot_threads(self, node_id=None, params=None, headers=None): Returns information about hot threads on each node in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg doc_type: The type to sample (default: cpu) Valid choices: - cpu, wait, block + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg doc_type: The type to sample. Valid choices: cpu, wait, + block :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty - task queue (default: true) - :arg interval: The interval for the second sampling of threads - :arg snapshots: Number of samples of thread stacktrace (default: - 10) + task queue. (default: True) + :arg interval: The interval for the second sampling of threads. + :arg snapshots: Number of samples of thread stacktrace. + (default: 10) :arg threads: Specify the number of threads to provide - information for (default: 3) - :arg timeout: Explicit operation timeout + information for. (default: 3) + :arg timeout: Operation timeout. """ # type is a reserved word so it cannot be used, use doc_type instead if "doc_type" in params: @@ -171,13 +177,13 @@ async def usage(self, node_id=None, metric=None, params=None, headers=None): Returns low-level information about REST actions usage on nodes. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, rest_actions - :arg timeout: Explicit operation timeout + metrics. Valid choices: _all, rest_actions + :arg timeout: Operation timeout. 
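A minimal sketch of exercising the regenerated async nodes client documented above; the host, credentials, and the particular metrics chosen below are illustrative assumptions, not part of this patch:

```python
# Illustrative sketch only; assumes a local dev cluster reachable at
# https://localhost:9200 with the default admin/admin credentials.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"], verify_certs=False
    )
    try:
        # `metric` and `index_metric` accept the valid choices listed in the
        # generated docstrings above.
        stats = await client.nodes.stats(metric="indices", index_metric="docs")
        usage = await client.nodes.usage(metric="rest_actions")
        print(stats["_nodes"], usage["_nodes"])
    finally:
        await client.close()


asyncio.run(main())
```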
""" return await self.transport.perform_request( "GET", diff --git a/opensearchpy/_async/client/nodes.pyi b/opensearchpy/_async/client/nodes.pyi index 5f108df4..b34a7ba9 100644 --- a/opensearchpy/_async/client/nodes.pyi +++ b/opensearchpy/_async/client/nodes.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -79,7 +88,6 @@ class NodesClient(NamespacedClient): fields: Optional[Any] = ..., groups: Optional[Any] = ..., include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., level: Optional[Any] = ..., timeout: Optional[Any] = ..., types: Optional[Any] = ..., diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 2773002b..9dc1e1ab 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import NamespacedClient, _make_path, query_params @@ -37,12 +47,12 @@ def reload_secure_settings( Reloads secure settings. - :arg body: An object containing the password for the - opensearch keystore - :arg node_id: A comma-separated list of node IDs to span the + :arg body: An object containing the password for the opensearch + keystore + :arg node_id: Comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes. - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return self.transport.perform_request( "POST", @@ -58,16 +68,16 @@ def info(self, node_id=None, metric=None, params=None, headers=None): Returns information about nodes in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg metric: A comma-separated list of metrics you wish - returned. Leave empty to return all. Valid choices: settings, os, - process, jvm, thread_pool, transport, http, plugins, ingest - :arg flat_settings: Return settings in flat format (default: + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. 
+ :arg metric: Comma-separated list of metrics you wish returned. + Leave empty to return all. Valid choices: settings, os, process, jvm, + thread_pool, transport, http, plugins, ingest + :arg flat_settings: Return settings in flat format. (default: false) - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return self.transport.perform_request( "GET", _make_path("_nodes", node_id, metric), params=params, headers=headers @@ -79,7 +89,6 @@ def info(self, node_id=None, metric=None, params=None, headers=None): "fields", "groups", "include_segment_file_sizes", - "include_unloaded_segments", "level", "timeout", "types", @@ -91,37 +100,34 @@ def stats( Returns statistical information about nodes in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, breaker, fs, http, indices, jvm, os, + metrics. Valid choices: _all, breaker, fs, http, indices, jvm, os, process, thread_pool, transport, discovery, indexing_pressure :arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) - metric isn't specified. Valid choices: _all, completion, docs, - fielddata, query_cache, flush, get, indexing, merge, request_cache, - refresh, search, segments, store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) - :arg groups: A comma-separated list of search groups for - `search` index metric + metric isn't specified. Valid choices: _all, store, indexing, get, + search, merge, flush, refresh, query_cache, fielddata, docs, warmer, + completion, segments, translog, suggest, request_cache, recovery + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). + :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). + :arg groups: Comma-separated list of search groups for `search` + index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory + applies if segment stats are requested). (default: false) :arg level: Return indices stats aggregated at index, node or - shard level Valid choices: indices, node, shards Default: node - :arg timeout: Explicit operation timeout - :arg types: A comma-separated list of document types for the - `indexing` index metric + shard level. Valid choices: indices, node, shards + :arg timeout: Operation timeout. 
+ :arg types: Comma-separated list of document types for the + `indexing` index metric. """ return self.transport.perform_request( "GET", @@ -138,21 +144,21 @@ def hot_threads(self, node_id=None, params=None, headers=None): Returns information about hot threads on each node in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg doc_type: The type to sample (default: cpu) Valid choices: - cpu, wait, block + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg doc_type: The type to sample. Valid choices: cpu, wait, + block :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty - task queue (default: true) - :arg interval: The interval for the second sampling of threads - :arg snapshots: Number of samples of thread stacktrace (default: - 10) + task queue. (default: True) + :arg interval: The interval for the second sampling of threads. + :arg snapshots: Number of samples of thread stacktrace. + (default: 10) :arg threads: Specify the number of threads to provide - information for (default: 3) - :arg timeout: Explicit operation timeout + information for. (default: 3) + :arg timeout: Operation timeout. """ # type is a reserved word so it cannot be used, use doc_type instead if "doc_type" in params: @@ -171,13 +177,13 @@ def usage(self, node_id=None, metric=None, params=None, headers=None): Returns low-level information about REST actions usage on nodes. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, rest_actions - :arg timeout: Explicit operation timeout + metrics. Valid choices: _all, rest_actions + :arg timeout: Operation timeout. """ return self.transport.perform_request( "GET", diff --git a/opensearchpy/client/nodes.pyi b/opensearchpy/client/nodes.pyi index d0f7beb4..67e5a05c 100644 --- a/opensearchpy/client/nodes.pyi +++ b/opensearchpy/client/nodes.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -79,7 +88,6 @@ class NodesClient(NamespacedClient): fields: Optional[Any] = ..., groups: Optional[Any] = ..., include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., level: Optional[Any] = ..., timeout: Optional[Any] = ..., types: Optional[Any] = ..., From 40f62fbdd90a13515c3ccf682067ca1ede74558e Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Tue, 3 Oct 2023 16:01:41 -0700 Subject: [PATCH 18/80] updated changelog (#522) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0664e665..39842a66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,10 +6,10 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Changed -- Integrated generated `tasks client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) -- Integrated generated `ingest client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) -- Integrated generated `dangling_indices client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#511](https://github.com/opensearch-project/opensearch-py/pull/511)) -- Integrated generated `nodes client` APIs into the existing module, ensuring alignment with the server and maintaining backward compatibility ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) +- Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) +- Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) +- Generate `dangling_indices` client from API specs ([#511](https://github.com/opensearch-project/opensearch-py/pull/511)) +- Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed From c13dd313fd27742ac036ed05ff731e2f3e69d170 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Fri, 6 Oct 2023 13:37:56 -0700 Subject: [PATCH 19/80] Bump 
version to 2.3.2 (#524) Signed-off-by: saimedhi Signed-off-by: roma2023 --- .github/workflows/integration.yml | 2 +- .github/workflows/unified-release.yml | 2 +- CHANGELOG.md | 15 ++++++++++++--- COMPATIBILITY.md | 11 ++++++----- noxfile.py | 2 +- opensearchpy/_version.py | 2 +- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 89d9f46c..106e940a 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0' ] + opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0' ] secured: [ "true", "false" ] steps: diff --git a/.github/workflows/unified-release.yml b/.github/workflows/unified-release.yml index 1551cfe2..cddea14a 100644 --- a/.github/workflows/unified-release.yml +++ b/.github/workflows/unified-release.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - stack_version: ['2.3.1'] + stack_version: ['2.3.2'] steps: - name: Checkout diff --git a/CHANGELOG.md b/CHANGELOG.md index 39842a66..f0438f15 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,8 +17,16 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Security ### Dependencies - Bumps `sphinx` from <7.1 to <7.3 -- Bumps `urllib3` from >=1.21.1, <2 to >=1.21.1 ([#466](https://github.com/opensearch-project/opensearch-py/pull/466)) -- Bumps `urllib3` from >=1.21.1 to >=1.26.9 ([#518](https://github.com/opensearch-project/opensearch-py/pull/518)) + +## [2.3.2] +### Added +### Changed +### Deprecated +### Removed +### Fixed +### Security +### Dependencies +- Bumps `urllib3` from >=1.21.1, <2 to >=1.26.9 ([#518](https://github.com/opensearch-project/opensearch-py/pull/518)) ## [2.3.1] ### Added @@ -130,10 +138,11 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Fixed Wrong return type hint in `async_scan` ([520](https://github.com/opensearch-project/opensearch-py/pull/520)) ### Security -[Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...HEAD +[Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.2...HEAD [2.0.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.0...v2.0.1 [2.1.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.1...v2.1.0 [2.1.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.0...v2.1.1 [2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0 [2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0 [2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1 +[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2 diff --git a/COMPATIBILITY.md b/COMPATIBILITY.md index a72261b3..0634f6cc 100644 --- a/COMPATIBILITY.md +++ b/COMPATIBILITY.md @@ -9,11 +9,12 @@ The below matrix shows the compatibility of the [`opensearch-py`](https://pypi.o | --- | --- | --- | | 1.0.0 | 1.0.0-1.2.4 | | | 1.1.0 | 1.3.0-1.3.7 | | -| 2.0.x | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.1.x | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | 
-| 2.2.0 | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.3.0 | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.3.1 | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | +| 2.0.x | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | +| 2.1.x | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | +| 2.2.0 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | +| 2.3.0 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | +| 2.3.1 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | +| 2.3.2 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | ## Upgrading diff --git a/noxfile.py b/noxfile.py index 3504ff75..a5da2b60 100644 --- a/noxfile.py +++ b/noxfile.py @@ -36,7 +36,7 @@ ) -@nox.session(python=["2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) +@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) def test(session): session.install(".") session.install("-r", "dev-requirements.txt") diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index 3985132a..82fac929 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -24,4 +24,4 @@ # specific language governing permissions and limitations # under the License. -__versionstr__ = "2.3.1" +__versionstr__ = "2.3.2" From 36df6e8dda98101f643a5bbf4fe17415ccec83dc Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Mon, 9 Oct 2023 13:45:18 -0400 Subject: [PATCH 20/80] Fix: typos. (#526) * Fix: typo. Signed-off-by: dblock * Fix: typo. Signed-off-by: dblock * Fixed its. Signed-off-by: dblock * Added Visual Code settings to .gitignore. Signed-off-by: dblock * Added loop type for async client. Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- .gitignore | 5 ++++- opensearchpy/_async/http_aiohttp.pyi | 3 ++- opensearchpy/_async/transport.py | 6 +++--- opensearchpy/connection/base.py | 4 ++-- opensearchpy/connection_pool.py | 14 +++++++------- opensearchpy/helpers/utils.py | 6 +++--- opensearchpy/transport.py | 6 +++--- .../test_server/test_helpers/test_data.py | 2 +- .../test_server/test_plugins/test_alerting.py | 6 +++--- .../test_async/test_server/test_rest_api_spec.py | 6 +++--- .../test_server/test_helpers/test_data.py | 2 +- .../test_server/test_plugins/test_alerting.py | 6 +++--- .../test_server/test_rest_api_spec.py | 2 +- 13 files changed, 36 insertions(+), 32 deletions(-) diff --git a/.gitignore b/.gitignore index 019de716..153eea24 100644 --- a/.gitignore +++ b/.gitignore @@ -148,6 +148,9 @@ test_opensearch/cover test_opensearch/local.py .ci/output -#Vi text editor +# vi text editor .*.swp *~ + +# Visual Studio Code +.vscode \ No newline at end of file diff --git a/opensearchpy/_async/http_aiohttp.pyi b/opensearchpy/_async/http_aiohttp.pyi index 4dea4317..223fdfff 100644 --- a/opensearchpy/_async/http_aiohttp.pyi +++ b/opensearchpy/_async/http_aiohttp.pyi @@ -24,6 +24,7 @@ # specific language governing permissions and limitations # under the License. 
+from asyncio import AbstractEventLoop from typing import Any, Collection, Mapping, Optional, Tuple, Union from ..connection import Connection @@ -65,7 +66,7 @@ class AIOHttpConnection(AsyncConnection): ssl_context: Optional[Any] = ..., http_compress: Optional[bool] = ..., opaque_id: Optional[str] = ..., - loop: Any = ..., + loop: Optional[AbstractEventLoop] = ..., trust_env: bool = ..., **kwargs: Any ) -> None: ... diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py index e93344bc..3db4516c 100644 --- a/opensearchpy/_async/transport.py +++ b/opensearchpy/_async/transport.py @@ -343,14 +343,14 @@ def get_connection(self): async def perform_request(self, method, url, headers=None, params=None, body=None): """ Perform the actual request. Retrieve a connection from the connection - pool, pass all the information to it's perform_request method and + pool, pass all the information to its perform_request method and return the data. If an exception was raised, mark the connection as failed and retry (up to `max_retries` times). If the operation was successful and the connection used was previously - marked as dead, mark it as live, resetting it's failure count. + marked as dead, mark it as live, resetting its failure count. :arg method: HTTP method to use :arg url: absolute url (without host) to target @@ -412,7 +412,7 @@ async def perform_request(self, method, url, headers=None, params=None, body=Non raise e else: - # connection didn't fail, confirm it's live status + # connection didn't fail, confirm its live status self.connection_pool.mark_live(connection) if method == "HEAD": diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index 435996b9..05edca73 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -56,7 +56,7 @@ class Connection(object): """ Class responsible for maintaining a connection to an OpenSearch node. It - holds persistent connection pool to it and it's main interface + holds persistent connection pool to it and its main interface (`perform_request`) is thread-safe. Also responsible for logging. @@ -158,7 +158,7 @@ def _raise_warnings(self, warning_headers): # Format is: '(number) OpenSearch-(version)-(instance) "(message)"' warning_messages = [] for header in warning_headers: - # Because 'Requests' does it's own folding of multiple HTTP headers + # Because 'Requests' does its own folding of multiple HTTP headers # into one header delimited by commas (totally standard compliant, just # annoying for cases like this) we need to expect there may be # more than one message per 'Warning' header. diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py index 0416fbec..61873748 100644 --- a/opensearchpy/connection_pool.py +++ b/opensearchpy/connection_pool.py @@ -55,8 +55,8 @@ class ConnectionSelector(object): process it will be the dictionary returned by the `host_info_callback`. Example of where this would be useful is a zone-aware selector that would - only select connections from it's own zones and only fall back to other - connections where there would be none in it's zones. + only select connections from its own zones and only fall back to other + connections where there would be none in its zones. """ def __init__(self, opts): @@ -112,7 +112,7 @@ class ConnectionPool(object): future reference. Upon each request the `Transport` will ask for a `Connection` via the - `get_connection` method. If the connection fails (it's `perform_request` + `get_connection` method. 
If the connection fails (its `perform_request` raises a `ConnectionError`) it will be marked as dead (via `mark_dead`) and put on a timeout (if it fails N times in a row the timeout is exponentially longer - the formula is `default_timeout * 2 ** (fail_count - 1)`). When @@ -132,7 +132,7 @@ def __init__( ): """ :arg connections: list of tuples containing the - :class:`~opensearchpy.Connection` instance and it's options + :class:`~opensearchpy.Connection` instance and its options :arg dead_timeout: number of seconds a connection should be retired for after a failure, increases on consecutive failures :arg timeout_cutoff: number of consecutive failures after which the @@ -211,7 +211,7 @@ def mark_live(self, connection): def resurrect(self, force=False): """ Attempt to resurrect a connection from the dead pool. It will try to - locate one (not all) eligible (it's timeout is over) connection to + locate one (not all) eligible (its timeout is over) connection to return to the live pool. Any resurrected connection is also returned. :arg force: resurrect a connection even if there is none eligible (used @@ -245,7 +245,7 @@ def resurrect(self, force=False): self.dead.put((timeout, connection)) return - # either we were forced or the connection is elligible to be retried + # either we were forced or the connection is eligible to be retried self.connections.append(connection) logger.info("Resurrecting connection %r (force=%s).", connection, force) return connection @@ -259,7 +259,7 @@ def get_connection(self): no connections are available and passes the list of live connections to the selector instance to choose from. - Returns a connection instance and it's current fail count. + Returns a connection instance and its current fail count. """ self.resurrect() connections = self.connections[:] diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index 3ebea18e..04f2ee37 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -222,7 +222,7 @@ class DslMeta(type): It then uses the information from that registry (as well as `name` and `shortcut` attributes from the base class) to construct any subclass based - on it's name. + on its name. For typical use see `QueryMeta` and `Query` in `opensearchpy.query`. """ @@ -235,7 +235,7 @@ def __init__(cls, name, bases, attrs): if not hasattr(cls, "_type_shortcut"): return if cls.name is None: - # abstract base class, register it's shortcut + # abstract base class, register its shortcut cls._types[cls._type_name] = cls._type_shortcut # and create a registry for subclasses if not hasattr(cls, "_classes"): @@ -264,7 +264,7 @@ class DslBase(object): - to_dict method to serialize into dict (to be sent via opensearch-py) - basic logical operators (&, | and ~) using a Bool(Filter|Query) TODO: move into a class specific for Query/Filter - - respects the definition of the class and (de)serializes it's + - respects the definition of the class and (de)serializes its attributes based on the `_param_defs` definition (for example turning all values in the `must` attribute into Query objects) """ diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index c1d69d2c..32c9baf4 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -341,14 +341,14 @@ def mark_dead(self, connection): def perform_request(self, method, url, headers=None, params=None, body=None): """ Perform the actual request. 
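The mark-dead/retry behaviour these transport docstrings describe is driven by the standard client keyword arguments; a minimal sketch, with placeholder endpoint and values:

```python
# Illustrative sketch only; endpoint and credentials are placeholders.
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    verify_certs=False,
    max_retries=3,          # cap on the retry loop described here
    retry_on_timeout=True,  # treat timeouts as retryable failures
)
client.info()
```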
Retrieve a connection from the connection - pool, pass all the information to it's perform_request method and + pool, pass all the information to its perform_request method and return the data. If an exception was raised, mark the connection as failed and retry (up to `max_retries` times). If the operation was successful and the connection used was previously - marked as dead, mark it as live, resetting it's failure count. + marked as dead, mark it as live, resetting its failure count. :arg method: HTTP method to use :arg url: absolute url (without host) to target @@ -409,7 +409,7 @@ def perform_request(self, method, url, headers=None, params=None, body=None): raise e else: - # connection didn't fail, confirm it's live status + # connection didn't fail, confirm its live status self.connection_pool.mark_live(connection) if method == "HEAD": diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index d513bcff..1194304e 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -947,7 +947,7 @@ async def create_git_index(client, index): ], "committer": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"}, "stats": {"deletions": 0, "insertions": 53, "lines": 53, "files": 2}, - "description": "From_dict, Q(dict) and bool query parses it's subqueries", + "description": "From_dict, Q(dict) and bool query parses its subqueries", "author": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"}, "parent_shas": ["d407f99d1959b7b862a541c066d9fd737ce913f3"], "committed_date": "2014-03-06T20:24:30", diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index f3f7fe32..2ef87bd3 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -127,7 +127,7 @@ async def test_search_monitor(self): # Create a dummy monitor await self.test_create_monitor() - # Create a monitor search query by it's name + # Create a monitor search query by its name query = {"query": {"match": {"monitor.name": "test-monitor"}}} # Perform the search with the above query @@ -145,7 +145,7 @@ async def test_get_monitor(self): # Create a dummy monitor await self.test_create_monitor() - # Create a monitor search query by it's name + # Create a monitor search query by its name query = {"query": {"match": {"monitor.name": "test-monitor"}}} # Perform the search with the above query @@ -169,7 +169,7 @@ async def test_run_monitor(self): # Create a dummy monitor await self.test_create_monitor() - # Create a monitor search query by it's name + # Create a monitor search query by its name query = {"query": {"match": {"monitor.name": "test-monitor"}}} # Perform the search with the above query diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index 27b20113..0773aab0 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -26,7 +26,7 @@ """ -Dynamically generated set of TestCases based on set of yaml files decribing +Dynamically generated set of TestCases based on set of yaml files describing some integration tests. 
 These files are shared among all official OpenSearch clients.
 """

@@ -106,7 +106,7 @@ async def run(self):
             pass

     async def run_code(self, test):
-        """Execute an instruction based on it's type."""
+        """Execute an instruction based on its type."""
         for action in test:
             assert len(action) == 1
             action_type, action = list(action.items())[0]
@@ -126,7 +126,7 @@ async def run_do(self, action):
         assert len(action) == 1

         # Remove the x_pack_rest_user authentication
-        # if it's given via headers. We're already authenticated
+        # if it is given via headers. We're already authenticated
         # via the 'elastic' user.
         if (
             headers
diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py
index 20b63e39..059a983a 100644
--- a/test_opensearchpy/test_server/test_helpers/test_data.py
+++ b/test_opensearchpy/test_server/test_helpers/test_data.py
@@ -964,7 +964,7 @@ def create_git_index(client, index):
         ],
         "committer": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"},
         "stats": {"deletions": 0, "insertions": 53, "lines": 53, "files": 2},
-        "description": "From_dict, Q(dict) and bool query parses it's subqueries",
+        "description": "From_dict, Q(dict) and bool query parses its subqueries",
         "author": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"},
         "parent_shas": ["d407f99d1959b7b862a541c066d9fd737ce913f3"],
         "committed_date": "2014-03-06T20:24:30",
diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py
index 406bd71f..3a503e43 100644
--- a/test_opensearchpy/test_server/test_plugins/test_alerting.py
+++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py
@@ -123,7 +123,7 @@ def test_search_monitor(self):
         # Create a dummy monitor
         self.test_create_monitor()

-        # Create a monitor search query by it's name
+        # Create a monitor search query by its name
         query = {"query": {"match": {"monitor.name": "test-monitor"}}}

         # Perform the search with the above query
@@ -141,7 +141,7 @@ def test_get_monitor(self):
         # Create a dummy monitor
         self.test_create_monitor()

-        # Create a monitor search query by it's name
+        # Create a monitor search query by its name
         query = {"query": {"match": {"monitor.name": "test-monitor"}}}

         # Perform the search with the above query
@@ -165,7 +165,7 @@ def test_run_monitor(self):
         # Create a dummy monitor
         self.test_create_monitor()

-        # Create a monitor search query by it's name
+        # Create a monitor search query by its name
         query = {"query": {"match": {"monitor.name": "test-monitor"}}}

         # Perform the search with the above query
diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py
index 7d1cbf51..b5d890ab 100644
--- a/test_opensearchpy/test_server/test_rest_api_spec.py
+++ b/test_opensearchpy/test_server/test_rest_api_spec.py
@@ -205,7 +205,7 @@ def run(self):
             pass

     def run_code(self, test):
-        """Execute an instruction based on it's type."""
+        """Execute an instruction based on its type."""
         for action in test:
             assert len(action) == 1
             action_type, action = list(action.items())[0]

From 2f42f68252fdeaf26623a15d2f966d5e9336b0bd Mon Sep 17 00:00:00 2001
From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com>
Date: Mon, 9 Oct 2023 13:47:26 -0700
Subject: [PATCH 21/80] Modified generator to generate api deprecation
 warnings (#527)

Signed-off-by: saimedhi
Signed-off-by: roma2023
---
 utils/generate-api.py | 4 ++++
 utils/templates/base  | 4 ++++
 2 files changed, 8
insertions(+) diff --git a/utils/generate-api.py b/utils/generate-api.py index cfe12af4..2cb93eb5 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -263,6 +263,7 @@ def __init__(self, namespace, name, definition, is_pyi=False): self.description = "" self.doc_url = "" self.stability = self._def.get("stability", "stable") + self.deprecation_message = self._def.get("deprecation_message") if isinstance(definition["documentation"], str): self.doc_url = definition["documentation"] @@ -560,6 +561,9 @@ def read_modules(): documentation = {"description": z["description"]} api.update({"documentation": documentation}) + if "deprecation_message" not in api and "x-deprecation-message" in z: + api.update({"deprecation_message": z["x-deprecation-message"]}) + if "params" not in api and "params" in z: api.update({"params": z["params"]}) diff --git a/utils/templates/base b/utils/templates/base index 9b58b6c2..47bb5956 100644 --- a/utils/templates/base +++ b/utils/templates/base @@ -28,6 +28,10 @@ {% endfor %} {% endif %} """ + {% if api.deprecation_message %} + from warnings import warn + warn("Deprecated: {{ api.deprecation_message }}") + {% endif %} {% include "substitutions" %} {% include "required" %} {% if api.body.serialize == "bulk" %} From 5e5a4dde5b3de64109020dbc78a1e04be71d6578 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Mon, 9 Oct 2023 14:55:51 -0700 Subject: [PATCH 22/80] Generate cat client from API specs (#529) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/client/cat.py | 655 ++++++++++++++++------------- opensearchpy/_async/client/cat.pyi | 85 +++- opensearchpy/client/cat.py | 653 +++++++++++++++------------- opensearchpy/client/cat.pyi | 85 +++- 5 files changed, 861 insertions(+), 618 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0438f15..fa812ec5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) - Generate `dangling_indices` client from API specs ([#511](https://github.com/opensearch-project/opensearch-py/pull/511)) - Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) +- Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index f7268308..a4dd9786 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import NamespacedClient, _make_path, query_params @@ -36,19 +46,19 @@ async def aliases(self, name=None, params=None, headers=None): filter and routing infos. - :arg name: A comma-separated list of alias names to return + :arg name: Comma-separated list of alias names. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers @@ -56,12 +66,12 @@ async def aliases(self, name=None, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -71,25 +81,24 @@ async def allocation(self, node_id=None, params=None, headers=None): much disk space they are using. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information - :arg bytes: The unit in which to display byte values Valid + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. 
(default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", @@ -105,15 +114,15 @@ async def count(self, index=None, params=None, headers=None): individual indices. - :arg index: A comma-separated list of index names to limit the - returned information - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list of indices to limit the + returned information. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers @@ -125,16 +134,16 @@ async def health(self, params=None, headers=None): Returns a concise representation of the cluster health. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg ts: Set to false to disable timestamping Default: True - :arg v: Verbose mode. Display column headers + :arg ts: Set to false to disable timestamping. (default: True) + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers @@ -146,9 +155,9 @@ async def help(self, params=None, headers=None): Returns help for the Cat APIs. - :arg help: Return help information + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by + to sort by. """ return await self.transport.perform_request( "GET", "/_cat", params=params, headers=headers @@ -156,6 +165,7 @@ async def help(self, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "expand_wildcards", "format", "h", @@ -164,7 +174,6 @@ async def help(self, params=None, headers=None): "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "pri", "s", "time", @@ -176,93 +185,110 @@ async def indices(self, index=None, params=None, headers=None): counts, disk size, ... - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid + :arg index: Comma-separated list of indices to limit the + returned information. 
+ :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg health: A health status ("green", "yellow", or "red" to - filter only indices matching the specified health status Valid choices: - green, yellow, red - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg health: Health status ('green', 'yellow', or 'red') to + filter only indices matching the specified health status. Valid + choices: green, yellow, red + :arg help: Return help information. (default: false) :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg pri: Set to true to return stats only for primary shards + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg pri: Set to true to return stats only for primary shards. + (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) async def master(self, params=None, headers=None): """ - Returns information about the master node. + Returns information about the cluster-manager node. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. 
(default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ from warnings import warn - warn("Deprecated: use `cluster_manager` instead") + warn( + "Deprecated: To promote inclusive language, please use '/_cat/cluster_manager' instead." + ) return await self.transport.perform_request( "GET", "/_cat/master", params=params, headers=headers ) - @query_params("format", "h", "help", "local", "cluster_manager", "s", "v") + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) async def cluster_manager(self, params=None, headers=None): """ - Returns information about the cluster_manager node. + Returns information about the cluster-manager node. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/cluster_manager", params=params, headers=headers @@ -270,14 +296,13 @@ async def cluster_manager(self, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "format", "full_id", "h", "help", - "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -287,28 +312,27 @@ async def nodes(self, params=None, headers=None): Returns basic statistics about performance of cluster nodes. - :arg bytes: The unit in which to display byte values Valid + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. 
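The `warn(...)` call added above is the user-visible change: `cat.master()` keeps working but now emits a deprecation warning pointing at `/_cat/cluster_manager`. A minimal migration sketch, assuming a local cluster at `https://localhost:9200` with the default `admin:admin` credentials and the client's async extras (`aiohttp`) installed:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"],
        use_ssl=True,
        verify_certs=False,
    )
    try:
        # Legacy spelling: still answered, but emits the deprecation warning above.
        print(await client.cat.master(format="json"))
        # Preferred spelling, backed by GET /_cat/cluster_manager.
        print(await client.cat.cluster_manager(format="json"))
    finally:
        await client.close()


asyncio.run(main())
```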
:arg full_id: Return the full node ID instead of the shortened - version (default: false) - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory - :arg local: Calculate the selected nodes using the local cluster - state rather than the state from master node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + version. (default: false) + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg local (Deprecated: This parameter does not cause this API + to act locally): Return local information, do not retrieve the state + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers @@ -323,22 +347,22 @@ async def recovery(self, index=None, params=None, headers=None): :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information + names to limit the returned information. :arg active_only: If `true`, the response only includes ongoing - shard recoveries - :arg bytes: The unit in which to display byte values Valid + shard recoveries. (default: false) + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg detailed: If `true`, the response includes detailed - information about shard recoveries - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + information about shard recoveries. (default: false) + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", _make_path("_cat", "recovery", index), params=params, headers=headers @@ -346,12 +370,12 @@ async def recovery(self, index=None, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -361,59 +385,74 @@ async def shards(self, index=None, params=None, headers=None): Provides a detailed view of shard allocation on nodes. 
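A usage sketch for the two methods documented above: `cat.nodes()` with an explicit column list and `cat.recovery()` limited to in-flight recoveries. The endpoint URL, credentials, and the `movies` index are illustrative assumptions, not part of this patch:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"], verify_certs=False
    )
    try:
        # Per-node stats; v=True adds the header row, h= picks the columns.
        print(await client.cat.nodes(v=True, h="name,heap.percent,cpu,load_1m"))
        # Only recoveries still running, byte values rendered in megabytes.
        print(
            await client.cat.recovery(
                index="movies", active_only=True, bytes="mb", v=True
            )
        )
    finally:
        await client.close()


asyncio.run(main())
```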
- :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") + @query_params( + "bytes", + "cluster_manager_timeout", + "format", + "h", + "help", + "master_timeout", + "s", + "v", + ) async def segments(self, index=None, params=None, headers=None): """ Provides low-level information about the segments in the shards of an index. - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. 
(default: false) """ return await self.transport.perform_request( "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -423,33 +462,34 @@ async def pending_tasks(self, params=None, headers=None): Returns a concise representation of the cluster pending tasks. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/pending_tasks", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "size", "v", @@ -460,23 +500,23 @@ async def thread_pool(self, thread_pool_patterns=None, params=None, headers=None queue and rejected statistics are returned for all thread pools. - :arg thread_pool_patterns: A comma-separated list of regular- - expressions to filter the thread pools in the output - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg thread_pool_patterns: Comma-separated list of regular- + expressions to filter the thread pools in the output. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. 
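The renamed `cluster_manager_timeout` parameter is passed straight through as a query-string kwarg, exactly like the other options above. A short sketch combining `cat.segments()` and `cat.pending_tasks()` under the same assumed local setup (the `movies` index is hypothetical):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"], verify_certs=False
    )
    try:
        # Segment-level detail for one index, sizes shown in kilobytes.
        print(await client.cat.segments(index="movies", bytes="kb", v=True))
        # Cluster-manager task queue, using the inclusive timeout parameter.
        print(await client.cat.pending_tasks(cluster_manager_timeout="30s", v=True))
    finally:
        await client.close()


asyncio.run(main())
```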
:arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: The multiplier in which to display values Valid - choices: , k, m, g, t, p - :arg v: Verbose mode. Display column headers + to sort by. + :arg size: The multiplier in which to display values. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", @@ -492,17 +532,17 @@ async def fielddata(self, fields=None, params=None, headers=None): node in the cluster. - :arg fields: A comma-separated list of fields to return in the - output - :arg bytes: The unit in which to display byte values Valid + :arg fields: Comma-separated list of fields to return in the + output. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", @@ -512,13 +552,12 @@ async def fielddata(self, fields=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "format", "h", "help", - "include_bootstrap", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -527,33 +566,32 @@ async def plugins(self, params=None, headers=None): Returns information about installed plugins across nodes node. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg include_bootstrap: Include bootstrap plugins in the - response + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/plugins", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -562,31 +600,32 @@ async def nodeattrs(self, params=None, headers=None): Returns information about custom node attributes. - :arg format: a short version of the Accept header, e.g. 
json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -595,31 +634,32 @@ async def repositories(self, params=None, headers=None): Returns information about snapshot repositories registered in the cluster. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from master node - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/repositories", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -629,23 +669,23 @@ async def snapshots(self, repository=None, params=None, headers=None): Returns all snapshots in a specific repository. - :arg repository: Name of repository from which to fetch the - snapshot information - :arg format: a short version of the Accept header, e.g. 
json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg ignore_unavailable: Set to true to ignore unavailable - snapshots - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", @@ -672,35 +712,37 @@ async def tasks(self, params=None, headers=None): the cluster. - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information (default: false) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg nodes: A comma-separated list of node IDs or names to limit + :arg detailed: Return detailed task information. (default: + false) + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -709,58 +751,91 @@ async def templates(self, name=None, params=None, headers=None): Returns information about existing templates. - :arg name: A pattern that returned template names must match - :arg format: a short version of the Accept header, e.g. 
json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return await self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) + @query_params() + async def all_pit_segments(self, params=None, headers=None): + """ + Lists all active point-in-time segments. + + """ + return await self.transport.perform_request( + "GET", "/_cat/pit_segments/_all", params=params, headers=headers + ) + + @query_params() + async def pit_segments(self, body=None, params=None, headers=None): + """ + List segments for one or several PITs. + + + :arg body: + """ + return await self.transport.perform_request( + "GET", "/_cat/pit_segments", params=params, headers=headers, body=body + ) + @query_params( - "allow_no_match", "format", "from_", "h", "help", "s", "size", "time", "v" + "active_only", + "bytes", + "completed_only", + "detailed", + "format", + "h", + "help", + "s", + "shards", + "time", + "v", ) - async def transforms(self, transform_id=None, params=None, headers=None): + async def segment_replication(self, index=None, params=None, headers=None): """ - Gets configuration and usage information about transforms. + Returns information about both on-going and latest completed Segment + Replication events. - :arg transform_id: The id of the transform for which to get - stats. '_all' or '*' implies all transforms - :arg allow_no_match: Whether to ignore if a wildcard expression - matches no transforms. (This includes `_all` string or when no - transforms have been specified) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg from_: skips a number of transform configs, defaults to 0 - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + segment replication events. (default: false) + :arg bytes: The unit in which to display byte values. Valid + choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg completed_only: If `true`, the response only includes + latest completed segment replication events. (default: false) + :arg detailed: If `true`, the response includes detailed + information about segment replications. (default: false) + :arg format: A short version of the Accept header, e.g. json, + yaml. 
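`all_pit_segments`, `pit_segments`, and `segment_replication` are new surface in this patch. A hedged sketch of calling them: the PIT id is a placeholder for a value returned by the point-in-time create API, the request-body shape follows the `/_cat/pit_segments` REST contract, and `segment_replication` only reports rows for indices that have segment replication enabled:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"], verify_certs=False
    )
    try:
        # Segments backing every open point-in-time on the cluster.
        print(await client.cat.all_pit_segments())
        # Segments for specific PITs; replace the placeholder with a real id.
        print(await client.cat.pit_segments(body={"pit_id": ["<your-pit-id>"]}))
        # On-going and latest completed segment replication events, if enabled.
        print(await client.cat.segment_replication(index="movies", v=True))
    finally:
        await client.close()


asyncio.run(main())
```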
+ :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: specifies a max number of transforms to get, defaults - to 100 - :arg time: The unit in which to display time values Valid + to sort by. + :arg shards: Comma-separated list of shards to display. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ - # from is a reserved word so it cannot be used, use from_ instead - if "from_" in params: - params["from"] = params.pop("from_") - return await self.transport.perform_request( "GET", - _make_path("_cat", "transforms", transform_id), + _make_path("_cat", "segment_replication", index), params=params, headers=headers, ) diff --git a/opensearchpy/_async/client/cat.pyi b/opensearchpy/_async/client/cat.pyi index c53d43bc..435403e9 100644 --- a/opensearchpy/_async/client/cat.pyi +++ b/opensearchpy/_async/client/cat.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -57,12 +66,12 @@ class CatClient(NamespacedClient): *, node_id: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -143,6 +152,7 @@ class CatClient(NamespacedClient): *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., @@ -151,7 +161,6 @@ class CatClient(NamespacedClient): include_unloaded_segments: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pri: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., @@ -171,12 +180,12 @@ class CatClient(NamespacedClient): async def master( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -194,11 +203,12 @@ class CatClient(NamespacedClient): async def cluster_manager( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., + 
master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -217,14 +227,13 @@ class CatClient(NamespacedClient): self, *, bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., full_id: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -270,12 +279,12 @@ class CatClient(NamespacedClient): *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -296,9 +305,11 @@ class CatClient(NamespacedClient): *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -316,12 +327,12 @@ class CatClient(NamespacedClient): async def pending_tasks( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -341,12 +352,12 @@ class CatClient(NamespacedClient): self, *, thread_pool_patterns: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., v: Optional[Any] = ..., @@ -387,13 +398,12 @@ class CatClient(NamespacedClient): async def plugins( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., - include_bootstrap: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -411,12 +421,12 @@ class CatClient(NamespacedClient): async def nodeattrs( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -434,12 +444,12 @@ class CatClient(NamespacedClient): async def repositories( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -458,12 +468,12 @@ class CatClient(NamespacedClient): self, 
*, repository: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -508,12 +518,12 @@ class CatClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -528,17 +538,52 @@ class CatClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - async def transforms( + async def all_pit_segments( self, *, - transform_id: Optional[Any] = ..., - allow_no_match: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def pit_segments( + self, + *, + body: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def segment_replication( + self, + *, + index: Optional[Any] = ..., + active_only: Optional[Any] = ..., + bytes: Optional[Any] = ..., + completed_only: Optional[Any] = ..., + detailed: Optional[Any] = ..., format: Optional[Any] = ..., - from_: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., s: Optional[Any] = ..., - size: Optional[Any] = ..., + shards: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index cc1106d5..8dac68cd 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import NamespacedClient, _make_path, query_params @@ -36,19 +46,19 @@ def aliases(self, name=None, params=None, headers=None): filter and routing infos. - :arg name: A comma-separated list of alias names to return + :arg name: Comma-separated list of alias names. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers @@ -56,12 +66,12 @@ def aliases(self, name=None, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -71,23 +81,24 @@ def allocation(self, node_id=None, params=None, headers=None): much disk space they are using. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information - :arg bytes: The unit in which to display byte values Valid + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. 
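The synchronous client mirrors the async surface one-to-one, minus the `await`. A sketch of `cat.aliases()` and `cat.allocation()` against an assumed local cluster with default `admin:admin` credentials:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

# Alias table deserialized from JSON instead of the default text columns.
print(client.cat.aliases(format="json"))

# Disk allocation per node, sized in gigabytes, sorted by disk usage.
print(client.cat.allocation(bytes="gb", s="disk.used", v=True))
```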
:arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", @@ -103,15 +114,15 @@ def count(self, index=None, params=None, headers=None): individual indices. - :arg index: A comma-separated list of index names to limit the - returned information - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list of indices to limit the + returned information. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers @@ -123,16 +134,16 @@ def health(self, params=None, headers=None): Returns a concise representation of the cluster health. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg ts: Set to false to disable timestamping Default: True - :arg v: Verbose mode. Display column headers + :arg ts: Set to false to disable timestamping. (default: True) + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers @@ -144,9 +155,9 @@ def help(self, params=None, headers=None): Returns help for the Cat APIs. - :arg help: Return help information + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by + to sort by. """ return self.transport.perform_request( "GET", "/_cat", params=params, headers=headers @@ -154,6 +165,7 @@ def help(self, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "expand_wildcards", "format", "h", @@ -162,7 +174,6 @@ def help(self, params=None, headers=None): "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "pri", "s", "time", @@ -174,93 +185,110 @@ def indices(self, index=None, params=None, headers=None): counts, disk size, ... - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
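`cat.count()` and `cat.health()` take the same formatting kwargs documented above; a brief sketch, with the `movies` index as a stand-in:

```python
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

# Document count for one index; omit `index` to count across the cluster.
print(client.cat.count(index="movies", v=True))

# One-line cluster health; ts=False drops the epoch and timestamp columns.
print(client.cat.health(ts=False, v=True))
```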
:arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg health: A health status ("green", "yellow", or "red" to - filter only indices matching the specified health status Valid choices: - green, yellow, red - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg health: Health status ('green', 'yellow', or 'red') to + filter only indices matching the specified health status. Valid + choices: green, yellow, red + :arg help: Return help information. (default: false) :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg pri: Set to true to return stats only for primary shards + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg pri: Set to true to return stats only for primary shards. + (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) def master(self, params=None, headers=None): """ - Returns information about the master node. + Returns information about the cluster-manager node. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. 
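A sketch of `cat.indices()` exercising the reworded `health` and `pri` options, under the same assumed local cluster as the earlier sketches:

```python
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

# Green indices only, primary-shard stats only, byte values in megabytes.
print(client.cat.indices(health="green", pri=True, bytes="mb", v=True))
```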
:arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ from warnings import warn - warn("Deprecated: use `cluster_manager` instead") + warn( + "Deprecated: To promote inclusive language, please use '/_cat/cluster_manager' instead." + ) return self.transport.perform_request( "GET", "/_cat/master", params=params, headers=headers ) - @query_params("format", "h", "help", "local", "cluster_manager_timeout", "s", "v") + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) def cluster_manager(self, params=None, headers=None): """ - Returns information about the cluster_manager node. + Returns information about the cluster-manager node. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", "/_cat/cluster_manager", params=params, headers=headers @@ -268,14 +296,13 @@ def cluster_manager(self, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "format", "full_id", "h", "help", - "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -285,28 +312,27 @@ def nodes(self, params=None, headers=None): Returns basic statistics about performance of cluster nodes. - :arg bytes: The unit in which to display byte values Valid + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. :arg full_id: Return the full node ID instead of the shortened - version (default: false) - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory - :arg local: Calculate the selected nodes using the local cluster - state rather than the state from master node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + version. 
(default: false) + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg local (Deprecated: This parameter does not cause this API + to act locally): Return local information, do not retrieve the state + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers @@ -321,22 +347,22 @@ def recovery(self, index=None, params=None, headers=None): :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information + names to limit the returned information. :arg active_only: If `true`, the response only includes ongoing - shard recoveries - :arg bytes: The unit in which to display byte values Valid + shard recoveries. (default: false) + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg detailed: If `true`, the response includes detailed - information about shard recoveries - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + information about shard recoveries. (default: false) + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", _make_path("_cat", "recovery", index), params=params, headers=headers @@ -344,12 +370,12 @@ def recovery(self, index=None, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -359,59 +385,74 @@ def shards(self, index=None, params=None, headers=None): Provides a detailed view of shard allocation on nodes. - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. 
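Rounding out the synchronous examples, `cat.shards()` with the sorting and byte-unit parameters documented here (the index name is again hypothetical):

```python
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

# Shard placement for one index, sorted by node, sizes in megabytes.
print(client.cat.shards(index="movies", s="node", bytes="mb", v=True))
```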
+ :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") + @query_params( + "bytes", + "cluster_manager_timeout", + "format", + "h", + "help", + "master_timeout", + "s", + "v", + ) def segments(self, index=None, params=None, headers=None): """ Provides low-level information about the segments in the shards of an index. - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -421,33 +462,34 @@ def pending_tasks(self, params=None, headers=None): Returns a concise representation of the cluster pending tasks. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. 
(default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", "/_cat/pending_tasks", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "size", "v", @@ -458,23 +500,23 @@ def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): queue and rejected statistics are returned for all thread pools. - :arg thread_pool_patterns: A comma-separated list of regular- - expressions to filter the thread pools in the output - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg thread_pool_patterns: Comma-separated list of regular- + expressions to filter the thread pools in the output. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: The multiplier in which to display values Valid - choices: , k, m, g, t, p - :arg v: Verbose mode. Display column headers + to sort by. + :arg size: The multiplier in which to display values. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", @@ -490,17 +532,17 @@ def fielddata(self, fields=None, params=None, headers=None): node in the cluster. - :arg fields: A comma-separated list of fields to return in the - output - :arg bytes: The unit in which to display byte values Valid + :arg fields: Comma-separated list of fields to return in the + output. + :arg bytes: The unit in which to display byte values. Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. 
json,
-            yaml
-        :arg h: Comma-separated list of column names to display
-        :arg help: Return help information
+        :arg format: A short version of the Accept header, e.g. json,
+            yaml.
+        :arg h: Comma-separated list of column names to display.
+        :arg help: Return help information. (default: false)
         :arg s: Comma-separated list of column names or column aliases
-            to sort by
-        :arg v: Verbose mode. Display column headers
+            to sort by.
+        :arg v: Verbose mode. Display column headers. (default: false)
         """
         return self.transport.perform_request(
             "GET",
@@ -510,13 +552,12 @@ def fielddata(self, fields=None, params=None, headers=None):
         )

     @query_params(
+        "cluster_manager_timeout",
         "format",
         "h",
         "help",
-        "include_bootstrap",
         "local",
         "master_timeout",
-        "cluster_manager_timeout",
         "s",
         "v",
     )
@@ -525,33 +566,32 @@ def plugins(self, params=None, headers=None):
         """
         Returns information about installed plugins across nodes.

-        :arg format: a short version of the Accept header, e.g. json,
-            yaml
-        :arg h: Comma-separated list of column names to display
-        :arg help: Return help information
-        :arg include_bootstrap: Include bootstrap plugins in the
-            response
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg format: A short version of the Accept header, e.g. json,
+            yaml.
+        :arg h: Comma-separated list of column names to display.
+        :arg help: Return help information. (default: false)
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
+            from cluster-manager node. (default: false)
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead): Operation timeout for connection
+            to master node.
         :arg s: Comma-separated list of column names or column aliases
-            to sort by
-        :arg v: Verbose mode. Display column headers
+            to sort by.
+        :arg v: Verbose mode. Display column headers. (default: false)
         """
         return self.transport.perform_request(
             "GET", "/_cat/plugins", params=params, headers=headers
         )

     @query_params(
+        "cluster_manager_timeout",
         "format",
         "h",
         "help",
         "local",
         "master_timeout",
-        "cluster_manager_timeout",
         "s",
         "v",
     )
@@ -560,31 +600,32 @@ def nodeattrs(self, params=None, headers=None):
         """
         Returns information about custom node attributes.

-        :arg format: a short version of the Accept header, e.g. json,
-            yaml
-        :arg h: Comma-separated list of column names to display
-        :arg help: Return help information
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg format: A short version of the Accept header, e.g. json,
+            yaml.
+        :arg h: Comma-separated list of column names to display.
+        :arg help: Return help information. (default: false)
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
+            from cluster-manager node. (default: false)
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead): Operation timeout for connection
+            to master node.
:arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -593,31 +634,32 @@ def repositories(self, params=None, headers=None): Returns information about snapshot repositories registered in the cluster. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from master node - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", "/_cat/repositories", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", @@ -627,23 +669,23 @@ def snapshots(self, repository=None, params=None, headers=None): Returns all snapshots in a specific repository. - :arg repository: Name of repository from which to fetch the - snapshot information - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg ignore_unavailable: Set to true to ignore unavailable - snapshots - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. 
Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", @@ -670,35 +712,37 @@ def tasks(self, params=None, headers=None): the cluster. - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information (default: false) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg nodes: A comma-separated list of node IDs or names to limit + :arg detailed: Return detailed task information. (default: + false) + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid + to sort by. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ return self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) @@ -707,58 +751,91 @@ def templates(self, name=None, params=None, headers=None): Returns information about existing templates. - :arg name: A pattern that returned template names must match - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. 
(default: false) """ return self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) + @query_params() + def all_pit_segments(self, params=None, headers=None): + """ + Lists all active point-in-time segments. + + """ + return self.transport.perform_request( + "GET", "/_cat/pit_segments/_all", params=params, headers=headers + ) + + @query_params() + def pit_segments(self, body=None, params=None, headers=None): + """ + List segments for one or several PITs. + + + :arg body: + """ + return self.transport.perform_request( + "GET", "/_cat/pit_segments", params=params, headers=headers, body=body + ) + @query_params( - "allow_no_match", "format", "from_", "h", "help", "s", "size", "time", "v" + "active_only", + "bytes", + "completed_only", + "detailed", + "format", + "h", + "help", + "s", + "shards", + "time", + "v", ) - def transforms(self, transform_id=None, params=None, headers=None): + def segment_replication(self, index=None, params=None, headers=None): """ - Gets configuration and usage information about transforms. + Returns information about both on-going and latest completed Segment + Replication events. - :arg transform_id: The id of the transform for which to get - stats. '_all' or '*' implies all transforms - :arg allow_no_match: Whether to ignore if a wildcard expression - matches no transforms. (This includes `_all` string or when no - transforms have been specified) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg from_: skips a number of transform configs, defaults to 0 - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + segment replication events. (default: false) + :arg bytes: The unit in which to display byte values. Valid + choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg completed_only: If `true`, the response only includes + latest completed segment replication events. (default: false) + :arg detailed: If `true`, the response includes detailed + information about segment replications. (default: false) + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. (default: false) :arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: specifies a max number of transforms to get, defaults - to 100 - :arg time: The unit in which to display time values Valid + to sort by. + :arg shards: Comma-separated list of shards to display. + :arg time: The unit in which to display time values. Valid choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + :arg v: Verbose mode. Display column headers. (default: false) """ - # from is a reserved word so it cannot be used, use from_ instead - if "from_" in params: - params["from"] = params.pop("from_") - return self.transport.perform_request( "GET", - _make_path("_cat", "transforms", transform_id), + _make_path("_cat", "segment_replication", index), params=params, headers=headers, ) diff --git a/opensearchpy/client/cat.pyi b/opensearchpy/client/cat.pyi index e29e9253..fc076ef8 100644 --- a/opensearchpy/client/cat.pyi +++ b/opensearchpy/client/cat.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. 
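The three cat actions added above (`all_pit_segments`, `pit_segments`, `segment_replication`) are new in this generation pass. A minimal usage sketch, not part of the patch itself: it assumes a local test cluster at `https://admin:admin@localhost:9200`, an existing `movies` index, and a point in time you have already created. The `pit_id` placeholder and the body shape for `pit_segments` are assumptions based on the PIT segments endpoint, not something this diff defines.

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

# GET /_cat/pit_segments/_all - segments for every active point in time.
print(client.cat.all_pit_segments(format="json"))

# GET /_cat/pit_segments - segments for specific PITs; the body carries
# the PIT ids ('<your-pit-id>' is a placeholder, not a real id).
print(client.cat.pit_segments(body={"pit_id": ["<your-pit-id>"]}, format="json"))

# GET /_cat/segment_replication/<index> - ongoing replication events only.
print(client.cat.segment_replication(index="movies", active_only=True, v=True))
```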
+# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -57,12 +66,12 @@ class CatClient(NamespacedClient): *, node_id: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -143,6 +152,7 @@ class CatClient(NamespacedClient): *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., @@ -151,7 +161,6 @@ class CatClient(NamespacedClient): include_unloaded_segments: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pri: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., @@ -171,12 +180,12 @@ class CatClient(NamespacedClient): def master( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -194,11 +203,12 @@ class CatClient(NamespacedClient): def cluster_manager( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -217,14 +227,13 @@ class CatClient(NamespacedClient): self, *, bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., full_id: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -270,12 +279,12 @@ class CatClient(NamespacedClient): *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -296,9 +305,11 @@ class CatClient(NamespacedClient): *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: 
Optional[Any] = ..., help: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -316,12 +327,12 @@ class CatClient(NamespacedClient): def pending_tasks( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -341,12 +352,12 @@ class CatClient(NamespacedClient): self, *, thread_pool_patterns: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., v: Optional[Any] = ..., @@ -387,13 +398,12 @@ class CatClient(NamespacedClient): def plugins( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., - include_bootstrap: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -411,12 +421,12 @@ class CatClient(NamespacedClient): def nodeattrs( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -434,12 +444,12 @@ class CatClient(NamespacedClient): def repositories( self, *, + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -458,12 +468,12 @@ class CatClient(NamespacedClient): self, *, repository: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., @@ -508,12 +518,12 @@ class CatClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -528,17 +538,52 @@ class CatClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
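Since every regenerated cat signature now lists `cluster_manager_timeout` ahead of the deprecated `master_timeout`, callers should prefer the inclusive name. A small sketch (timeout values are arbitrary; it assumes the same local test cluster as above):

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

# Preferred: the inclusive parameter name.
print(client.cat.cluster_manager(cluster_manager_timeout="30s", v=True))

# Still accepted, but flagged as deprecated in the regenerated docstrings.
print(client.cat.pending_tasks(master_timeout="30s", format="json"))
```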
- def transforms( + def all_pit_segments( self, *, - transform_id: Optional[Any] = ..., - allow_no_match: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def pit_segments( + self, + *, + body: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def segment_replication( + self, + *, + index: Optional[Any] = ..., + active_only: Optional[Any] = ..., + bytes: Optional[Any] = ..., + completed_only: Optional[Any] = ..., + detailed: Optional[Any] = ..., format: Optional[Any] = ..., - from_: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[Any] = ..., s: Optional[Any] = ..., - size: Optional[Any] = ..., + shards: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[Any] = ..., pretty: Optional[bool] = ..., From fce224915820dce7d079aad1317a341ea20c718f Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Mon, 9 Oct 2023 14:57:09 -0700 Subject: [PATCH 23/80] Generate cluster client from API specs (#530) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/client/cluster.py | 367 +++++++++++++++++-------- opensearchpy/_async/client/cluster.pyi | 130 ++++++++- opensearchpy/client/cluster.py | 367 +++++++++++++++++-------- opensearchpy/client/cluster.pyi | 130 ++++++++- utils/generate-api.py | 6 + 6 files changed, 765 insertions(+), 236 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa812ec5..e412cc18 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) - Generate `dangling_indices` client from API specs ([#511](https://github.com/opensearch-project/opensearch-py/pull/511)) +- Generate `cluster` client from API specs ([#530](https://github.com/opensearch-project/opensearch-py/pull/530)) - Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) - Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) ### Deprecated diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 174245cb..b64bdc5b 100644 --- a/opensearchpy/_async/client/cluster.py +++ 
b/opensearchpy/_async/client/cluster.py
@@ -25,16 +25,27 @@
 # under the License.

+# ----------------------------------------------------
+# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST.
+#
+# To contribute, kindly make essential modifications through either the "opensearch-py client generator":
+# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py
+# or the "OpenSearch API specification" available at:
+# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json
+# -----------------------------------------------------
+
+
 from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params


 class ClusterClient(NamespacedClient):
     @query_params(
+        "awareness_attribute",
+        "cluster_manager_timeout",
         "expand_wildcards",
         "level",
         "local",
         "master_timeout",
-        "cluster_manager_timeout",
         "timeout",
         "wait_for_active_shards",
         "wait_for_events",
@@ -48,31 +59,35 @@ async def health(self, index=None, params=None, headers=None):
         Returns basic information about the health of the cluster.

-        :arg index: Limit the information returned to a specific index
+        :arg index: Limit the information returned to specific indices.
+        :arg awareness_attribute: The awareness attribute for which the
+            health is required.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
         :arg expand_wildcards: Whether to expand wildcard expression to
-            concrete indices that are open, closed or both. Valid choices: open,
-            closed, hidden, none, all Default: all
-        :arg level: Specify the level of detail for returned information
-            Valid choices: cluster, indices, shards Default: cluster
+            concrete indices that are open, closed or both. Valid choices: all,
+            open, closed, hidden, none
+        :arg level: Specify the level of detail for returned
+            information. Valid choices: cluster, indices, shards,
+            awareness_attributes
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
-        :arg timeout: Explicit operation timeout
+            from cluster-manager node. (default: false)
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead): Operation timeout for connection
+            to master node.
+        :arg timeout: Operation timeout.
         :arg wait_for_active_shards: Wait until the specified number of
-            shards is active
+            shards is active.
         :arg wait_for_events: Wait until all currently queued events
-            with the given priority are processed Valid choices: immediate, urgent,
-            high, normal, low, languid
+            with the given priority are processed. Valid choices: immediate,
+            urgent, high, normal, low, languid
         :arg wait_for_no_initializing_shards: Whether to wait until
-            there are no initializing shards in the cluster
+            there are no initializing shards in the cluster.
         :arg wait_for_no_relocating_shards: Whether to wait until there
-            are no relocating shards in the cluster
+            are no relocating shards in the cluster.
         :arg wait_for_nodes: Wait until the specified number of nodes is
-            available
-        :arg wait_for_status: Wait until cluster is in a specific state
+            available.
+        :arg wait_for_status: Wait until cluster is in a specific state.
Valid choices: green, yellow, red """ return await self.transport.perform_request( @@ -82,17 +97,20 @@ async def health(self, index=None, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") async def pending_tasks(self, params=None, headers=None): """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ return await self.transport.perform_request( "GET", "/_cluster/pending_tasks", params=params, headers=headers @@ -100,12 +118,12 @@ async def pending_tasks(self, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "local", "master_timeout", - "cluster_manager_timeout", "wait_for_metadata_version", "wait_for_timeout", ) @@ -115,28 +133,31 @@ async def state(self, metric=None, index=None, params=None, headers=None): :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, blocks, metadata, nodes, routing_table, - routing_nodes, master_node, version - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + metrics. Valid choices: _all, blocks, metadata, nodes, routing_table, + routing_nodes, master_node, cluster_manager_node, version + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg flat_settings: Return settings in flat format. (default: false) :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. 
(default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg wait_for_metadata_version: Wait for the metadata version to - be equal or greater than the specified metadata version + be equal or greater than the specified metadata version. :arg wait_for_timeout: The maximum time to wait for - wait_for_metadata_version before timing out + wait_for_metadata_version before timing out. """ if index and metric in SKIP_IN_PATH: metric = "_all" @@ -154,13 +175,13 @@ async def stats(self, node_id=None, params=None, headers=None): Returns high-level overview of cluster statistics. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg flat_settings: Return settings in flat format (default: + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg flat_settings: Return settings in flat format. (default: false) - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "GET", @@ -172,10 +193,10 @@ async def stats(self, node_id=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "dry_run", "explain", "master_timeout", - "cluster_manager_timeout", "metric", "retry_failed", "timeout", @@ -187,30 +208,30 @@ async def reroute(self, body=None, params=None, headers=None): :arg body: The definition of `commands` to perform (`move`, `cancel`, `allocate`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg dry_run: Simulate the operation only and return the - resulting state + resulting state. :arg explain: Return an explanation of why the commands can or - cannot be executed - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + cannot be executed. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg metric: Limit the information returned to the specified - metrics. Defaults to all but metadata Valid choices: _all, blocks, - metadata, nodes, routing_table, master_node, version + metrics. Defaults to all but metadata. :arg retry_failed: Retries allocation of shards that are blocked - due to too many subsequent allocation failures - :arg timeout: Explicit operation timeout + due to too many subsequent allocation failures. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "POST", "/_cluster/reroute", params=params, headers=headers, body=body ) @query_params( + "cluster_manager_timeout", "flat_settings", "include_defaults", "master_timeout", - "cluster_manager_timeout", "timeout", ) async def get_settings(self, params=None, headers=None): @@ -218,22 +239,23 @@ async def get_settings(self, params=None, headers=None): Returns cluster settings. - :arg flat_settings: Return settings in flat format (default: + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
+ :arg flat_settings: Return settings in flat format. (default: false) :arg include_defaults: Whether to return all default clusters - setting. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + setting. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "GET", "/_cluster/settings", params=params, headers=headers ) @query_params( - "flat_settings", "master_timeout", "cluster_manager_timeout", "timeout" + "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout" ) async def put_settings(self, body, params=None, headers=None): """ @@ -242,13 +264,14 @@ async def put_settings(self, body, params=None, headers=None): :arg body: The settings to be updated. Can be either `transient` or `persistent` (survives cluster restart). - :arg flat_settings: Return settings in flat format (default: + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -276,9 +299,9 @@ async def allocation_explain(self, body=None, params=None, headers=None): :arg body: The index, shard, and primary flag to explain. Empty means 'explain the first unassigned shard' :arg include_disk_info: Return information about disk usage and - shard sizes (default: false) + shard sizes. (default: false) :arg include_yes_decisions: Return 'YES' decisions in - explanation (default: false) + explanation. (default: false) """ return await self.transport.perform_request( "POST", @@ -288,16 +311,19 @@ async def allocation_explain(self, body=None, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def delete_component_template(self, name, params=None, headers=None): """ - Deletes a component template + Deletes a component template. - :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -309,19 +335,20 @@ async def delete_component_template(self, name, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") async def get_component_template(self, name=None, params=None, headers=None): """ - Returns one or more component templates + Returns one or more component templates. - :arg name: The comma separated names of the component templates + :arg name: The Comma-separated names of the component templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ return await self.transport.perform_request( "GET", @@ -330,19 +357,22 @@ async def get_component_template(self, name=None, params=None, headers=None): headers=headers, ) - @query_params("create", "master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout") async def put_component_template(self, name, body, params=None, headers=None): """ - Creates or updates a component template + Creates or updates a component template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + new or can also replace an existing one. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ for param in (name, body): if param in SKIP_IN_PATH: @@ -356,19 +386,18 @@ async def put_component_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("local", "master_timeout") async def exists_component_template(self, name, params=None, headers=None): """ - Returns information about whether a particular component template exist + Returns information about whether a particular component template exist. - :arg name: The name of the template + :arg name: The name of the template. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. 
(default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -388,7 +417,7 @@ async def delete_voting_config_exclusions(self, params=None, headers=None): :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting - configuration exclusions list. Default: True + configuration exclusions list. (default: True) """ return await self.transport.perform_request( "DELETE", @@ -403,14 +432,136 @@ async def post_voting_config_exclusions(self, params=None, headers=None): Updates the cluster voting config exclusions by node ids or node names. - :arg node_ids: A comma-separated list of the persistent ids of - the nodes to exclude from the voting configuration. If specified, you - may not also specify ?node_names. - :arg node_names: A comma-separated list of the names of the + :arg node_ids: Comma-separated list of the persistent ids of the nodes to exclude from the voting configuration. If specified, you may - not also specify ?node_ids. - :arg timeout: Explicit operation timeout Default: 30s + not also specify ?node_names. + :arg node_names: Comma-separated list of the names of the nodes + to exclude from the voting configuration. If specified, you may not also + specify ?node_ids. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "POST", "/_cluster/voting_config_exclusions", params=params, headers=headers ) + + @query_params() + async def delete_decommission_awareness(self, params=None, headers=None): + """ + Delete any existing decommission. + + """ + return await self.transport.perform_request( + "DELETE", + "/_cluster/decommission/awareness/", + params=params, + headers=headers, + ) + + @query_params() + async def delete_weighted_routing(self, params=None, headers=None): + """ + Delete weighted shard routing weights. + + """ + return await self.transport.perform_request( + "DELETE", + "/_cluster/routing/awareness/weights", + params=params, + headers=headers, + ) + + @query_params() + async def get_decommission_awareness( + self, awareness_attribute_name, params=None, headers=None + ): + """ + Get details and status of decommissioned attribute. + + + :arg awareness_attribute_name: Awareness attribute name. + """ + if awareness_attribute_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'awareness_attribute_name'." + ) + + return await self.transport.perform_request( + "GET", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + "_status", + ), + params=params, + headers=headers, + ) + + @query_params() + async def get_weighted_routing(self, attribute, params=None, headers=None): + """ + Fetches weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return await self.transport.perform_request( + "GET", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) + + @query_params() + async def put_decommission_awareness( + self, + awareness_attribute_name, + awareness_attribute_value, + params=None, + headers=None, + ): + """ + Decommissions an awareness attribute. 
+ + + :arg awareness_attribute_name: Awareness attribute name. + :arg awareness_attribute_value: Awareness attribute value. + """ + for param in (awareness_attribute_name, awareness_attribute_value): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "PUT", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + awareness_attribute_value, + ), + params=params, + headers=headers, + ) + + @query_params() + async def put_weighted_routing(self, attribute, params=None, headers=None): + """ + Updates weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return await self.transport.perform_request( + "PUT", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) diff --git a/opensearchpy/_async/client/cluster.pyi b/opensearchpy/_async/client/cluster.pyi index f2cd948c..2685cbb5 100644 --- a/opensearchpy/_async/client/cluster.pyi +++ b/opensearchpy/_async/client/cluster.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -33,11 +42,12 @@ class ClusterClient(NamespacedClient): self, *, index: Optional[Any] = ..., + awareness_attribute: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., level: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_events: Optional[Any] = ..., @@ -61,9 +71,9 @@ class ClusterClient(NamespacedClient): async def pending_tasks( self, *, + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -83,12 +93,12 @@ class ClusterClient(NamespacedClient): metric: Optional[Any] = ..., index: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., wait_for_metadata_version: Optional[Any] = ..., wait_for_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -127,10 +137,10 @@ class ClusterClient(NamespacedClient): self, *, body: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., dry_run: Optional[Any] = ..., explain: Optional[Any] = ..., master_timeout: 
Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., metric: Optional[Any] = ..., retry_failed: Optional[Any] = ..., timeout: Optional[Any] = ..., @@ -150,10 +160,10 @@ class ClusterClient(NamespacedClient): async def get_settings( self, *, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., include_defaults: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -172,9 +182,9 @@ class ClusterClient(NamespacedClient): self, *, body: Any, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -228,8 +238,8 @@ class ClusterClient(NamespacedClient): self, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -248,9 +258,9 @@ class ClusterClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -269,9 +279,9 @@ class ClusterClient(NamespacedClient): name: Any, *, body: Any, + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -292,7 +302,6 @@ class ClusterClient(NamespacedClient): *, local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -342,3 +351,104 @@ class ClusterClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... + async def delete_decommission_awareness( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
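A round trip through the regenerated component-template methods might look like the sketch below. The template name and body are illustrative only; the body follows the standard component-template shape, which this patch does not itself define, and the example assumes the async extras are installed along with a local test cluster.

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"],
        use_ssl=True,
        verify_certs=False,
    )
    template_body = {
        "template": {
            "settings": {"number_of_shards": 1},
            "mappings": {"properties": {"title": {"type": "text"}}},
        }
    }
    # Create, check, fetch, and delete a component template.
    await client.cluster.put_component_template("my_template", body=template_body)
    print(await client.cluster.exists_component_template("my_template"))  # truthy if present
    print(await client.cluster.get_component_template("my_template"))
    await client.cluster.delete_component_template("my_template", timeout="30s")
    await client.close()


asyncio.run(main())
```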
+ async def delete_weighted_routing( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def get_decommission_awareness( + self, + awareness_attribute_name: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def get_weighted_routing( + self, + attribute: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def put_decommission_awareness( + self, + awareness_attribute_name: Any, + awareness_attribute_value: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def put_weighted_routing( + self, + attribute: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index fd749cbc..28f1f0e8 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -25,16 +25,27 @@ # under the License. 
+# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class ClusterClient(NamespacedClient): @query_params( + "awareness_attribute", + "cluster_manager_timeout", "expand_wildcards", "level", "local", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", "wait_for_events", @@ -48,31 +59,35 @@ def health(self, index=None, params=None, headers=None): Returns basic information about the health of the cluster. - :arg index: Limit the information returned to a specific index + :arg index: Limit the information returned to specific indicies. + :arg awareness_attribute: The awareness attribute for which the + health is required. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg level: Specify the level of detail for returned information - Valid choices: cluster, indices, shards Default: cluster + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg level: Specify the level of detail for returned + information. Valid choices: cluster, indices, shards, + awareness_attributes :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Wait until the specified number of - shards is active + shards is active. :arg wait_for_events: Wait until all currently queued events - with the given priority are processed Valid choices: immediate, urgent, - high, normal, low, languid + with the given priority are processed. Valid choices: immediate, + urgent, high, normal, low, languid :arg wait_for_no_initializing_shards: Whether to wait until - there are no initializing shards in the cluster + there are no initializing shards in the cluster. :arg wait_for_no_relocating_shards: Whether to wait until there - are no relocating shards in the cluster + are no relocating shards in the cluster. :arg wait_for_nodes: Wait until the specified number of nodes is - available - :arg wait_for_status: Wait until cluster is in a specific state + available. + :arg wait_for_status: Wait until cluster is in a specific state. 
Valid choices: green, yellow, red """ return self.transport.perform_request( @@ -82,17 +97,20 @@ def health(self, index=None, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") def pending_tasks(self, params=None, headers=None): """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ return self.transport.perform_request( "GET", "/_cluster/pending_tasks", params=params, headers=headers @@ -100,12 +118,12 @@ def pending_tasks(self, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "local", "master_timeout", - "cluster_manager_timeout", "wait_for_metadata_version", "wait_for_timeout", ) @@ -115,28 +133,31 @@ def state(self, metric=None, index=None, params=None, headers=None): :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, blocks, metadata, nodes, routing_table, - routing_nodes, master_node, version - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + metrics. Valid choices: _all, blocks, metadata, nodes, routing_table, + routing_nodes, master_node, cluster_manager_node, version + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: + concrete indices that are open, closed or both. Valid choices: all, + open, closed, hidden, none + :arg flat_settings: Return settings in flat format. (default: false) :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. 
:arg wait_for_metadata_version: Wait for the metadata version to - be equal or greater than the specified metadata version + be equal or greater than the specified metadata version. :arg wait_for_timeout: The maximum time to wait for - wait_for_metadata_version before timing out + wait_for_metadata_version before timing out. """ if index and metric in SKIP_IN_PATH: metric = "_all" @@ -154,13 +175,13 @@ def stats(self, node_id=None, params=None, headers=None): Returns high-level overview of cluster statistics. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg flat_settings: Return settings in flat format (default: + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg flat_settings: Return settings in flat format. (default: false) - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return self.transport.perform_request( "GET", @@ -172,10 +193,10 @@ def stats(self, node_id=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "dry_run", "explain", "master_timeout", - "cluster_manager_timeout", "metric", "retry_failed", "timeout", @@ -187,30 +208,30 @@ def reroute(self, body=None, params=None, headers=None): :arg body: The definition of `commands` to perform (`move`, `cancel`, `allocate`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg dry_run: Simulate the operation only and return the - resulting state + resulting state. :arg explain: Return an explanation of why the commands can or - cannot be executed - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + cannot be executed. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. :arg metric: Limit the information returned to the specified - metrics. Defaults to all but metadata Valid choices: _all, blocks, - metadata, nodes, routing_table, master_node, version + metrics. Defaults to all but metadata. :arg retry_failed: Retries allocation of shards that are blocked - due to too many subsequent allocation failures - :arg timeout: Explicit operation timeout + due to too many subsequent allocation failures. + :arg timeout: Operation timeout. """ return self.transport.perform_request( "POST", "/_cluster/reroute", params=params, headers=headers, body=body ) @query_params( + "cluster_manager_timeout", "flat_settings", "include_defaults", "master_timeout", - "cluster_manager_timeout", "timeout", ) def get_settings(self, params=None, headers=None): @@ -218,22 +239,23 @@ def get_settings(self, params=None, headers=None): Returns cluster settings. - :arg flat_settings: Return settings in flat format (default: + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. (default: false) :arg include_defaults: Whether to return all default clusters - setting. 
- :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + setting. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ return self.transport.perform_request( "GET", "/_cluster/settings", params=params, headers=headers ) @query_params( - "flat_settings", "master_timeout", "cluster_manager_timeout", "timeout" + "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout" ) def put_settings(self, body, params=None, headers=None): """ @@ -242,13 +264,14 @@ def put_settings(self, body, params=None, headers=None): :arg body: The settings to be updated. Can be either `transient` or `persistent` (survives cluster restart). - :arg flat_settings: Return settings in flat format (default: + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -276,9 +299,9 @@ def allocation_explain(self, body=None, params=None, headers=None): :arg body: The index, shard, and primary flag to explain. Empty means 'explain the first unassigned shard' :arg include_disk_info: Return information about disk usage and - shard sizes (default: false) + shard sizes. (default: false) :arg include_yes_decisions: Return 'YES' decisions in - explanation (default: false) + explanation. (default: false) """ return self.transport.perform_request( "POST", @@ -288,16 +311,19 @@ def allocation_explain(self, body=None, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def delete_component_template(self, name, params=None, headers=None): """ - Deletes a component template + Deletes a component template. - :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -309,19 +335,20 @@ def delete_component_template(self, name, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") def get_component_template(self, name=None, params=None, headers=None): """ - Returns one or more component templates + Returns one or more component templates. - :arg name: The comma separated names of the component templates + :arg name: The Comma-separated names of the component templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ return self.transport.perform_request( "GET", @@ -330,19 +357,22 @@ def get_component_template(self, name=None, params=None, headers=None): headers=headers, ) - @query_params("create", "master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout") def put_component_template(self, name, body, params=None, headers=None): """ - Creates or updates a component template + Creates or updates a component template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + new or can also replace an existing one. (default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. + :arg timeout: Operation timeout. """ for param in (name, body): if param in SKIP_IN_PATH: @@ -356,19 +386,18 @@ def put_component_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("local", "master_timeout") def exists_component_template(self, name, params=None, headers=None): """ - Returns information about whether a particular component template exist + Returns information about whether a particular component template exist. - :arg name: The name of the template + :arg name: The name of the template. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. 
(default: false) + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead): Operation timeout for connection + to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -388,7 +417,7 @@ def delete_voting_config_exclusions(self, params=None, headers=None): :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting - configuration exclusions list. Default: True + configuration exclusions list. (default: True) """ return self.transport.perform_request( "DELETE", @@ -403,14 +432,136 @@ def post_voting_config_exclusions(self, params=None, headers=None): Updates the cluster voting config exclusions by node ids or node names. - :arg node_ids: A comma-separated list of the persistent ids of - the nodes to exclude from the voting configuration. If specified, you - may not also specify ?node_names. - :arg node_names: A comma-separated list of the names of the + :arg node_ids: Comma-separated list of the persistent ids of the nodes to exclude from the voting configuration. If specified, you may - not also specify ?node_ids. - :arg timeout: Explicit operation timeout Default: 30s + not also specify ?node_names. + :arg node_names: Comma-separated list of the names of the nodes + to exclude from the voting configuration. If specified, you may not also + specify ?node_ids. + :arg timeout: Operation timeout. """ return self.transport.perform_request( "POST", "/_cluster/voting_config_exclusions", params=params, headers=headers ) + + @query_params() + def delete_decommission_awareness(self, params=None, headers=None): + """ + Delete any existing decommission. + + """ + return self.transport.perform_request( + "DELETE", + "/_cluster/decommission/awareness/", + params=params, + headers=headers, + ) + + @query_params() + def delete_weighted_routing(self, params=None, headers=None): + """ + Delete weighted shard routing weights. + + """ + return self.transport.perform_request( + "DELETE", + "/_cluster/routing/awareness/weights", + params=params, + headers=headers, + ) + + @query_params() + def get_decommission_awareness( + self, awareness_attribute_name, params=None, headers=None + ): + """ + Get details and status of decommissioned attribute. + + + :arg awareness_attribute_name: Awareness attribute name. + """ + if awareness_attribute_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'awareness_attribute_name'." + ) + + return self.transport.perform_request( + "GET", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + "_status", + ), + params=params, + headers=headers, + ) + + @query_params() + def get_weighted_routing(self, attribute, params=None, headers=None): + """ + Fetches weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return self.transport.perform_request( + "GET", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) + + @query_params() + def put_decommission_awareness( + self, + awareness_attribute_name, + awareness_attribute_value, + params=None, + headers=None, + ): + """ + Decommissions an awareness attribute. + + + :arg awareness_attribute_name: Awareness attribute name. + :arg awareness_attribute_value: Awareness attribute value. 
+ """ + for param in (awareness_attribute_name, awareness_attribute_value): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return self.transport.perform_request( + "PUT", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + awareness_attribute_value, + ), + params=params, + headers=headers, + ) + + @query_params() + def put_weighted_routing(self, attribute, params=None, headers=None): + """ + Updates weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return self.transport.perform_request( + "PUT", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) diff --git a/opensearchpy/client/cluster.pyi b/opensearchpy/client/cluster.pyi index 49b27c54..ccc3737a 100644 --- a/opensearchpy/client/cluster.pyi +++ b/opensearchpy/client/cluster.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -33,11 +42,12 @@ class ClusterClient(NamespacedClient): self, *, index: Optional[Any] = ..., + awareness_attribute: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., level: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_events: Optional[Any] = ..., @@ -61,9 +71,9 @@ class ClusterClient(NamespacedClient): def pending_tasks( self, *, + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -83,12 +93,12 @@ class ClusterClient(NamespacedClient): metric: Optional[Any] = ..., index: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., wait_for_metadata_version: Optional[Any] = ..., wait_for_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -127,10 +137,10 @@ class ClusterClient(NamespacedClient): self, *, body: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., dry_run: Optional[Any] = ..., explain: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., metric: Optional[Any] = ..., retry_failed: Optional[Any] = ..., timeout: Optional[Any] = ..., @@ -150,10 +160,10 @@ 
class ClusterClient(NamespacedClient): def get_settings( self, *, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., include_defaults: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -172,9 +182,9 @@ class ClusterClient(NamespacedClient): self, *, body: Any, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -228,8 +238,8 @@ class ClusterClient(NamespacedClient): self, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -248,9 +258,9 @@ class ClusterClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -269,9 +279,9 @@ class ClusterClient(NamespacedClient): name: Any, *, body: Any, + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -292,7 +302,6 @@ class ClusterClient(NamespacedClient): *, local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -342,3 +351,104 @@ class ClusterClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... + def delete_decommission_awareness( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def delete_weighted_routing( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
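The synchronous stubs mirror the async surface above. A minimal, hedged sketch of reading and clearing shard routing weights with the synchronous client — `zone` is a hypothetical awareness attribute name, and a local cluster is assumed:

```python
from opensearchpy import OpenSearch

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])
# Fetch the current shard routing weights for the hypothetical 'zone' attribute.
print(client.cluster.get_weighted_routing("zone"))
# Remove any configured routing weights.
client.cluster.delete_weighted_routing()
```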
+ def get_decommission_awareness( + self, + awareness_attribute_name: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_weighted_routing( + self, + attribute: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def put_decommission_awareness( + self, + awareness_attribute_name: Any, + awareness_attribute_value: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def put_weighted_routing( + self, + attribute: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... diff --git a/utils/generate-api.py b/utils/generate-api.py index 2cb93eb5..fffd0e82 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -502,6 +502,12 @@ def read_modules(): if p["x-operation-group"] != "nodes.hot_threads" and "type" in params_new: params_new.pop("type") + if ( + p["x-operation-group"] == "cluster.health" + and "ensure_node_commissioned" in params_new + ): + params_new.pop("ensure_node_commissioned") + if bool(params_new): p.update({"params": params_new}) From 781c0ff24cadfaff6734368772bd6a3504647bd6 Mon Sep 17 00:00:00 2001 From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:11:35 -0700 Subject: [PATCH 24/80] Added new guide & sample module for using index templates. 
(#531) Added index_template guide and sample Signed-off-by: Djcarrillo6 --- CHANGELOG.md | 1 + USER_GUIDE.md | 1 + guides/index_template.md | 184 ++++++++++++++++++ .../index_template/index_template_sample.py | 143 ++++++++++++++ 4 files changed, 329 insertions(+) create mode 100644 guides/index_template.md create mode 100644 samples/index_template/index_template_sample.py diff --git a/CHANGELOG.md b/CHANGELOG.md index e412cc18..66987ff1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Added - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) +- Added new guide for using index templates with the client ([#531](https://github.com/opensearch-project/opensearch-py/pull/531)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/USER_GUIDE.md b/USER_GUIDE.md index 95ed6f11..fb460019 100644 --- a/USER_GUIDE.md +++ b/USER_GUIDE.md @@ -154,6 +154,7 @@ print(response) - [Point in Time](guides/point_in_time.md) - [Using a Proxy](guides/proxy.md) - [Working with Snapshots](guides/snapshot.md) +- [Index Templates](guides/index_template.md) ## Plugins diff --git a/guides/index_template.md b/guides/index_template.md new file mode 100644 index 00000000..3afdd1dc --- /dev/null +++ b/guides/index_template.md @@ -0,0 +1,184 @@ +# Index Template +Index templates are a convenient way to define settings, mappings, and aliases for one or more indices when they are created. In this guide, you'll learn how to create an index template and apply it to an index. + +## Setup + +Assuming you have OpenSearch running locally on port 9200, you can create a client instance with the following code: +```python +from opensearchpy import OpenSearch +client = OpenSearch( + hosts=['https://localhost:9200'], + use_ssl=True, + verify_certs=False, + http_auth=('admin', 'admin') +) +``` + +## Index Template API Actions + +### Create an Index Template +You can create an index template to define default settings and mappings for indices of certain patterns. The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: + +```python +client.indices.put_index_template( + name='books', + body={ + 'index_patterns': ['books-*'], + 'template': { + 'settings': { + 'index': { + 'number_of_shards': 3, + 'number_of_replicas': 0 + } + }, + 'mappings': { + 'properties': { + 'title': { 'type': 'text' }, + 'author': { 'type': 'text' }, + 'published_on': { 'type': 'date' }, + 'pages': { 'type': 'integer' } + } + } + } + } +) +``` + +Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. 
Let's create an index named `books-nonfiction` and verify that its settings and mappings match those of the template:
+
+```python
+client.indices.create(index='books-nonfiction')
+print(client.indices.get(index='books-nonfiction'))
+```
+
+### Multiple Index Templates
+If multiple index templates match an index's name, OpenSearch applies the template with the highest `priority`. The following example creates two index templates, `books` and `books-fiction`, whose patterns overlap but which carry different settings:
+
+```python
+client.indices.put_index_template(
+  name='books',
+  body={
+    'index_patterns': ['books-*'],
+    'priority': 0, # default priority
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 3,
+          'number_of_replicas': 0
+        }
+      }
+    }
+  }
+)
+
+client.indices.put_index_template(
+  name='books-fiction',
+  body={
+    'index_patterns': ['books-fiction-*'],
+    'priority': 1, # higher priority than the `books` template
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 1,
+          'number_of_replicas': 1
+        }
+      }
+    }
+  }
+)
+```
+
+When we create an index named `books-fiction-romance`, OpenSearch will apply the `books-fiction` template's settings to the index:
+
+```python
+client.indices.create(index='books-fiction-romance')
+print(client.indices.get(index='books-fiction-romance'))
+```
+
+### Composable Index Templates
+Composable index templates let you define reusable component templates and compose them into a final template. The following example creates a component template named `books_mappings` with default mappings, then composes it into the index templates covering the `books-*` and `books-fiction-*` patterns:
+
+```python
+client.cluster.put_component_template(
+  name='books_mappings',
+  body={
+    'template': {
+      'mappings': {
+        'properties': {
+          'title': { 'type': 'text' },
+          'author': { 'type': 'text' },
+          'published_on': { 'type': 'date' },
+          'pages': { 'type': 'integer' }
+        }
+      }
+    }
+  }
+)
+
+client.indices.put_index_template(
+  name='books',
+  body={
+    'index_patterns': ['books-*'],
+    'composed_of': ['books_mappings'], # use the `books_mappings` component template
+    'priority': 0,
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 3,
+          'number_of_replicas': 0
+        }
+      }
+    }
+  }
+)
+
+client.indices.put_index_template(
+  name='books-fiction',
+  body={
+    'index_patterns': ['books-fiction-*'],
+    'composed_of': ['books_mappings'], # use the `books_mappings` component template
+    'priority': 1,
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 1,
+          'number_of_replicas': 1
+        }
+      }
+    }
+  }
+)
+```
+
+When we create an index named `books-fiction-horror`, OpenSearch will apply the `books-fiction` template's settings and the `books_mappings` component template's mappings to the index:
+
+```python
+client.indices.create(index='books-fiction-horror')
+print(client.indices.get(index='books-fiction-horror'))
+```
+
+### Get an Index Template
+You can retrieve an index template with the `get_index_template` API action:
+
+```python
+print(client.indices.get_index_template(name='books'))
+```
+
+### Delete an Index Template
+You can delete an index template with the `delete_index_template` API action:
+
+```python
+client.indices.delete_index_template(name='books')
+```
+
+## Cleanup
+Let's delete all resources created in this guide:
+
+```python
+client.indices.delete(index='books-*')
+client.indices.delete_index_template(name='books-fiction')
+client.cluster.delete_component_template(name='books_mappings')
+```
+
+# Sample Code
+See
[index_template_sample.py](/samples/index_template/index_template_sample.py) for a working sample of the concepts in this guide. \ No newline at end of file diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py new file mode 100644 index 00000000..dab504be --- /dev/null +++ b/samples/index_template/index_template_sample.py @@ -0,0 +1,143 @@ +from opensearchpy import OpenSearch + +# Create a client instance +client = OpenSearch( + hosts=['https://localhost:9200'], + use_ssl=True, + verify_certs=False, + http_auth=('admin', 'admin') +) + +# You can create an index template to define default settings and mappings for indices of certain patterns. The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: +client.indices.put_index_template( +name='books', +body={ + 'index_patterns': ['books-*'], + 'priority': 1, + 'template': { + 'settings': { + 'index': { + 'number_of_shards': 3, + 'number_of_replicas': 0 + } + }, + 'mappings': { + 'properties': { + 'title': { 'type': 'text' }, + 'author': { 'type': 'text' }, + 'published_on': { 'type': 'date' }, + 'pages': { 'type': 'integer' } + } + } + } +} +) + +# Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: +client.indices.create(index='books-nonfiction') +print(client.indices.get(index='books-nonfiction')) + +# If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: +client.indices.put_index_template( +name='books', +body={ + 'index_patterns': ['books-*'], + 'priority': 1, + 'template': { + 'settings': { + 'index': { + 'number_of_shards': 3, + 'number_of_replicas': 0 + } + } + } +} +) + +client.indices.put_index_template( +name='books-fiction', +body={ + 'index_patterns': ['books-fiction-*'], + 'priority': 2, + 'template': { + 'settings': { + 'index': { + 'number_of_shards': 1, + 'number_of_replicas': 1 + } + } + } +} +) + +# # Test multiple index templates +client.indices.create(index='books-fiction-romance') +print(client.indices.get(index='books-fiction-romance')) + + +# Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. 
The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns: +client.cluster.put_component_template( +name='books_mappings', +body={ + 'template': { + 'mappings': { + 'properties': { + 'title': { 'type': 'text' }, + 'author': { 'type': 'text' }, + 'published_on': { 'type': 'date' }, + 'pages': { 'type': 'integer' } + } + } + } +} +) + +client.indices.put_index_template( +name='books', +body={ + 'index_patterns': ['books-*'], + 'composed_of': ['books_mappings'], + 'priority': 4, + 'template': { + 'settings': { + 'index': { + 'number_of_shards': 3, + 'number_of_replicas': 0 + } + } + } +} +) + +client.indices.put_index_template( +name='books-fiction', +body={ + 'index_patterns': ['books-fiction-*'], + 'composed_of': ['books_mappings'], + 'priority': 5, + 'template': { + 'settings': { + 'index': { + 'number_of_shards': 1, + 'number_of_replicas': 1 + } + } + } +} +) + + +# Test composable index templates +client.indices.create(index='books-fiction-horror') +print(client.indices.get(index='books-fiction-horror')) + +# Get an index template +print(client.indices.get_index_template(name='books')) + +# Delete an index template +client.indices.delete_index_template(name='books') + +# Cleanup +client.indices.delete(index='books-*') +client.indices.delete_index_template(name='books-fiction') +client.cluster.delete_component_template(name='books_mappings') \ No newline at end of file From ef898dc17d8eda5bb7a29da60e7196eae73c657d Mon Sep 17 00:00:00 2001 From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com> Date: Thu, 12 Oct 2023 14:11:21 -0700 Subject: [PATCH 25/80] Removed EOL Python3.5 & bumped urllib3 version to patch security vulnerability (#533) Updated CHANGELOG with pull # Updated CHANGELOG with pull # Updated CHANGELOG removed section. 
Updated CHANGELOG removed section again Signed-off-by: Djcarrillo6 Signed-off-by: roma2023 --- .github/workflows/test.yml | 1 - CHANGELOG.md | 2 ++ noxfile.py | 2 +- setup.py | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3287c11b..bd0ac738 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,6 @@ jobs: strategy: matrix: entry: - - { os: 'ubuntu-20.04', python-version: "3.5" } - { os: 'ubuntu-20.04', python-version: "3.6" } - { os: 'ubuntu-latest', python-version: "3.7" } - { os: 'ubuntu-latest', python-version: "3.8" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 66987ff1..8ba684ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,6 +40,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Fixed race condition in AWSV4SignerAuth & AWSV4SignerAsyncAuth when using refreshable credentials ([#470](https://github.com/opensearch-project/opensearch-py/pull/470)) ### Security ### Dependencies +- Bumps `urllib3` from >= 1.26.9 to >= 1.26.17 [#533](https://github.com/opensearch-project/opensearch-py/pull/533) ## [2.3.0] ### Added @@ -67,6 +68,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Deprecated ### Removed - Removed support for Python 2.7 ([#421](https://github.com/opensearch-project/opensearch-py/pull/421)) +- Removed support for Python 3.5 ([#533](https://github.com/opensearch-project/opensearch-py/pull/533)) ### Fixed - Fixed flaky CI tests by replacing httpbin with a simple http_server ([#395](https://github.com/opensearch-project/opensearch-py/pull/395)) - Fixed import cycle when importing async helpers ([#311](https://github.com/opensearch-project/opensearch-py/pull/311)) diff --git a/noxfile.py b/noxfile.py index a5da2b60..80b4e400 100644 --- a/noxfile.py +++ b/noxfile.py @@ -36,7 +36,7 @@ ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) +@nox.session(python=["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) def test(session): session.install(".") session.install("-r", "dev-requirements.txt") diff --git a/setup.py b/setup.py index c21e053f..8bde5f40 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,7 @@ if package == module_dir or package.startswith(module_dir + ".") ] install_requires = [ - "urllib3>=1.26.9", + "urllib3>=1.26.17", "requests>=2.4.0, <3.0.0", "six", "python-dateutil", From ea219a39fbc4d576567e00af6d72a1f39a6192b8 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 12 Oct 2023 18:55:31 -0400 Subject: [PATCH 26/80] Align pool_maxsize for different connection pool implementations. (#535) * Align pool_maxsize for different connection pool implementations. Signed-off-by: dblock * Document connection classes and settings. Signed-off-by: dblock * Undo change in async for backwards compatibility. Signed-off-by: dblock * Fix: typo. 
Signed-off-by: dblock

---------

Signed-off-by: dblock
Signed-off-by: roma2023
---
 CHANGELOG.md                                  |  1 +
 USER_GUIDE.md                                 |  1 +
 guides/auth.md                                |  1 -
 guides/connection_classes.md                  | 81 +++++++++++++++++++
 guides/proxy.md                               |  1 -
 opensearchpy/connection/http_urllib3.py       |  9 ++-
 opensearchpy/transport.py                     |  6 ++
 .../test_client/test_requests.py              | 32 ++++++++
 test_opensearchpy/test_client/test_urllib3.py | 39 +++++++++
 9 files changed, 166 insertions(+), 5 deletions(-)
 create mode 100644 guides/connection_classes.md
 create mode 100644 test_opensearchpy/test_client/test_requests.py
 create mode 100644 test_opensearchpy/test_client/test_urllib3.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8ba684ab..01e890a1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467))
 - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502))
 - Added new guide for using index templates with the client ([#531](https://github.com/opensearch-project/opensearch-py/pull/531))
+- Added `pool_maxsize` for `Urllib3HttpConnection` ([#535](https://github.com/opensearch-project/opensearch-py/pull/535))
 ### Changed
 - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508))
 - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513))
diff --git a/USER_GUIDE.md b/USER_GUIDE.md
index fb460019..28005e05 100644
--- a/USER_GUIDE.md
+++ b/USER_GUIDE.md
@@ -155,6 +155,7 @@
 - [Using a Proxy](guides/proxy.md)
 - [Working with Snapshots](guides/snapshot.md)
 - [Index Templates](guides/index_template.md)
+- [Connection Classes](guides/connection_classes.md)

 ## Plugins

diff --git a/guides/auth.md b/guides/auth.md
index 4b314764..3e7f4092 100644
--- a/guides/auth.md
+++ b/guides/auth.md
@@ -113,7 +113,6 @@ client = OpenSearch(
     ['htps://...'],
     use_ssl=True,
     verify_certs=True,
-    connection_class=RequestsHttpConnection,
     http_auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL)
 )

diff --git a/guides/connection_classes.md b/guides/connection_classes.md
new file mode 100644
index 00000000..da7357fb
--- /dev/null
+++ b/guides/connection_classes.md
@@ -0,0 +1,81 @@
+- [Connection Classes](#connection-classes)
+  - [Selecting a Connection Class](#selecting-a-connection-class)
+    - [Urllib3HttpConnection](#urllib3httpconnection)
+    - [RequestsHttpConnection](#requestshttpconnection)
+    - [AsyncHttpConnection](#asynchttpconnection)
+  - [Connection Pooling](#connection-pooling)
+
+# Connection Classes
+
+The OpenSearch Python synchronous client supports both the `Urllib3HttpConnection` connection class (default) from the [urllib3](https://pypi.org/project/urllib3/) library, and `RequestsHttpConnection` from the [requests](https://pypi.org/project/requests/) library. We recommend you use the default unless your application is standardized on `requests`.
+
+The faster asynchronous client implements a class called `AsyncHttpConnection`, which uses [aiohttp](https://pypi.org/project/aiohttp/).
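+
+To confirm which connection class a client ended up with, you can inspect its transport. A minimal sketch, assuming a default client pointed at a local cluster:
+
+```python
+from opensearchpy import OpenSearch
+
+client = OpenSearch(hosts=[{'host': 'localhost', 'port': 9200}])
+# Urllib3HttpConnection is the default synchronous connection class.
+print(client.transport.connection_class)
+```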
+ +## Selecting a Connection Class + +### Urllib3HttpConnection + +```python +from opensearchpy import OpenSearch, Urllib3HttpConnection + +client = OpenSearch( + hosts = [{'host': 'localhost', 'port': 9200}], + http_auth = ('admin', 'admin'), + use_ssl = True, + verify_certs = False, + ssl_show_warn = False, + connection_class = Urllib3HttpConnection +) +``` + +### RequestsHttpConnection + +```python +from opensearchpy import OpenSearch, RequestsHttpConnection + +client = OpenSearch( + hosts = [{'host': 'localhost', 'port': 9200}], + http_auth = ('admin', 'admin'), + use_ssl = True, + verify_certs = False, + ssl_show_warn = False, + connection_class = RequestsHttpConnection +) +``` + +### AsyncHttpConnection + +```python +from opensearchpy import AsyncOpenSearch, AsyncHttpConnection + +async def main(): + client = AsyncOpenSearch( + hosts = [{'host': 'localhost', 'port': 9200}], + http_auth = ('admin', 'admin'), + use_ssl = True, + verify_certs = False, + ssl_show_warn = False, + connection_class = AsyncHttpConnection + ) +``` + +## Connection Pooling + +The OpenSearch Python client has a connection pool for each `host` value specified during initialization, and a connection pool for HTTP connections to each host implemented in the underlying HTTP libraries. You can adjust the max size of the latter connection pool with `pool_maxsize`. + +If you don't set this value, each connection library implementation will provide its default, which is typically `10`. Changing the pool size may improve performance in some multithreaded scenarios. + +The following example sets the number of connections in the connection pool to 12. + +```python +from opensearchpy import OpenSearch + +client = OpenSearch( + hosts = [{'host': 'localhost', 'port': 9200}], + http_auth = ('admin', 'admin'), + use_ssl = True, + verify_certs = False, + ssl_show_warn = False, + pool_maxsize = 12, +) +``` \ No newline at end of file diff --git a/guides/proxy.md b/guides/proxy.md index 5be7edf4..96b7d441 100644 --- a/guides/proxy.md +++ b/guides/proxy.md @@ -13,7 +13,6 @@ OpenSearch( hosts=["htps://..."], use_ssl=True, verify_certs=True, - connection_class=RequestsHttpConnection, trust_env=True, ) ``` diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py index 6fc09e72..4bc27bbb 100644 --- a/opensearchpy/connection/http_urllib3.py +++ b/opensearchpy/connection/http_urllib3.py @@ -86,7 +86,7 @@ class Urllib3HttpConnection(Connection): ``ssl`` module for exact options for your environment). :arg ssl_assert_hostname: use hostname verification if not `False` :arg ssl_assert_fingerprint: verify the supplied certificate fingerprint if not `None` - :arg maxsize: the number of connections which will be kept open to this + :arg pool_maxsize: the number of connections which will be kept open to this host. See https://urllib3.readthedocs.io/en/1.4/pools.html#api for more information. 
:arg headers: any custom http headers to be add to requests @@ -109,7 +109,7 @@ def __init__( ssl_version=None, ssl_assert_hostname=None, ssl_assert_fingerprint=None, - maxsize=10, + pool_maxsize=None, headers=None, ssl_context=None, http_compress=None, @@ -203,8 +203,11 @@ def __init__( if not ssl_show_warn: urllib3.disable_warnings() + if pool_maxsize and isinstance(pool_maxsize, int): + kw["maxsize"] = pool_maxsize + self.pool = pool_class( - self.hostname, port=self.port, timeout=self.timeout, maxsize=maxsize, **kw + self.hostname, port=self.port, timeout=self.timeout, **kw ) def perform_request( diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index 32c9baf4..301955df 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -83,6 +83,7 @@ def __init__( serializers=None, default_mimetype="application/json", max_retries=3, + pool_maxsize=None, retry_on_status=(502, 503, 504), retry_on_timeout=False, send_get_body_as="GET", @@ -120,6 +121,8 @@ def __init__( don't support passing bodies with GET requests. If you set this to 'POST' a POST method will be used instead, if to 'source' then the body will be serialized and passed as a query parameter `source`. + :arg pool_maxsize: Maximum connection pool size used by pool-manager + For custom connection-pooling on current session Any extra keyword arguments will be passed to the `connection_class` when creating and instance unless overridden by that connection's @@ -139,6 +142,7 @@ def __init__( self.deserializer = Deserializer(_serializers, default_mimetype) self.max_retries = max_retries + self.pool_maxsize = pool_maxsize self.retry_on_timeout = retry_on_timeout self.retry_on_status = retry_on_status self.send_get_body_as = send_get_body_as @@ -211,6 +215,8 @@ def _create_connection(host): # previously unseen params, create new connection kwargs = self.kwargs.copy() kwargs.update(host) + if self.pool_maxsize and isinstance(self.pool_maxsize, int): + kwargs["pool_maxsize"] = self.pool_maxsize return self.connection_class(**kwargs) connections = map(_create_connection, hosts) diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py new file mode 100644 index 00000000..11434a17 --- /dev/null +++ b/test_opensearchpy/test_client/test_requests.py @@ -0,0 +1,32 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ +from unittest import TestCase + +from opensearchpy import OpenSearch, RequestsHttpConnection + + +class TestRequests(TestCase): + def test_connection_class(self): + client = OpenSearch(connection_class=RequestsHttpConnection) + self.assertEqual(client.transport.pool_maxsize, None) + self.assertEqual(client.transport.connection_class, RequestsHttpConnection) + self.assertIsInstance( + client.transport.connection_pool.connections[0], RequestsHttpConnection + ) + + def test_pool_maxsize(self): + client = OpenSearch(connection_class=RequestsHttpConnection, pool_maxsize=42) + self.assertEqual(client.transport.pool_maxsize, 42) + self.assertEqual( + client.transport.connection_pool.connections[0] + .session.adapters["https://"] + ._pool_maxsize, + 42, + ) diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py new file mode 100644 index 00000000..227164eb --- /dev/null +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -0,0 +1,39 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from unittest import TestCase + +from urllib3.connectionpool import HTTPConnectionPool + +from opensearchpy import OpenSearch, Urllib3HttpConnection + + +class TestUrlLib3(TestCase): + def test_default(self): + client = OpenSearch() + self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) + self.assertEqual(client.transport.pool_maxsize, None) + + def test_connection_class(self): + client = OpenSearch(connection_class=Urllib3HttpConnection) + self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) + self.assertIsInstance( + client.transport.connection_pool.connections[0], Urllib3HttpConnection + ) + self.assertIsInstance( + client.transport.connection_pool.connections[0].pool, HTTPConnectionPool + ) + + def test_pool_maxsize(self): + client = OpenSearch(connection_class=Urllib3HttpConnection, pool_maxsize=42) + self.assertEqual(client.transport.pool_maxsize, 42) + # https://github.com/python/cpython/blob/3.12/Lib/queue.py#L35 + self.assertEqual( + client.transport.connection_pool.connections[0].pool.pool.maxsize, 42 + ) From af0ae8706928db2fbce44210a96a1f367b18be17 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 13 Oct 2023 14:29:54 -0400 Subject: [PATCH 27/80] Add micro benchmarks. (#537) * Align pool_maxsize for different connection pool implementations. Signed-off-by: dblock * Added benchmarks. Signed-off-by: dblock * Multi-threaded vs. async benchmarks. Signed-off-by: dblock * Set pool size to the number of threads. Signed-off-by: dblock * Added sync/async benchmark. Signed-off-by: dblock * Report client-side latency. Signed-off-by: dblock * Various updates to benchmarks, demonstrating threading improves throughput. Signed-off-by: dblock * Bench info. Signed-off-by: dblock * Fixup format. Signed-off-by: dblock * Undo async maxsize. Signed-off-by: dblock * Moved benchmarks folder. Signed-off-by: dblock * Updated documentation and project description. 
Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + README.md | 2 +- benchmarks/README.md | 63 ++ benchmarks/bench_async.py | 101 +++ benchmarks/bench_info_sync.py | 93 +++ benchmarks/bench_sync.py | 132 ++++ benchmarks/bench_sync_async.py | 12 + benchmarks/poetry.lock | 847 +++++++++++++++++++++++++ benchmarks/poetry.toml | 2 + benchmarks/pyproject.toml | 16 + benchmarks/thread_with_return_value.py | 25 + 11 files changed, 1293 insertions(+), 1 deletion(-) create mode 100644 benchmarks/README.md create mode 100644 benchmarks/bench_async.py create mode 100644 benchmarks/bench_info_sync.py create mode 100644 benchmarks/bench_sync.py create mode 100644 benchmarks/bench_sync_async.py create mode 100644 benchmarks/poetry.lock create mode 100644 benchmarks/poetry.toml create mode 100644 benchmarks/pyproject.toml create mode 100644 benchmarks/thread_with_return_value.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 01e890a1..768bdca9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) - Added new guide for using index templates with the client ([#531](https://github.com/opensearch-project/opensearch-py/pull/531)) - Added `pool_maxsize` for `Urllib3HttpConnection` ([#535](https://github.com/opensearch-project/opensearch-py/pull/535)) +- Added benchmarks ([#537](https://github.com/opensearch-project/opensearch-py/pull/537)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/README.md b/README.md index 7ecaea56..e4524469 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ For more information, see [opensearch.org](https://opensearch.org/) and the [API ## User Guide -To get started with the OpenSearch Python Client, see [User Guide](https://github.com/opensearch-project/opensearch-py/blob/main/USER_GUIDE.md). +To get started with the OpenSearch Python Client, see [User Guide](https://github.com/opensearch-project/opensearch-py/blob/main/USER_GUIDE.md). This repository also contains [working samples](https://github.com/opensearch-project/opensearch-py/tree/main/samples) and [benchmarks](https://github.com/opensearch-project/opensearch-py/tree/main/benchmarks). ## Compatibility with OpenSearch diff --git a/benchmarks/README.md b/benchmarks/README.md new file mode 100644 index 00000000..1d21d851 --- /dev/null +++ b/benchmarks/README.md @@ -0,0 +1,63 @@ +- [Benchmarks](#benchmarks) + - [Start OpenSearch](#start-opensearch) + - [Install Prerequisites](#install-prerequisites) + - [Run Benchmarks](#run-benchmarks) + +## Benchmarks + +Python client benchmarks using [richbench](https://github.com/tonybaloney/rich-bench). + +### Start OpenSearch + +``` +docker run -p 9200:9200 -e "discovery.type=single-node" opensearchproject/opensearch:latest +``` + +### Install Prerequisites + +Install [poetry](https://python-poetry.org/docs/), then install package dependencies. 
+ +``` +poetry install +``` + +Benchmarks use the code in this repository by specifying the dependency as `opensearch-py = { path = "..", develop=true, extras=["async"] }` in [pyproject.toml](pyproject.toml). + +### Run Benchmarks + +Run all benchmarks available as follows. + +``` +poetry run richbench . --repeat 1 --times 1 +``` + +Outputs results from all the runs. + +``` + Benchmarks, repeat=1, number=1 +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓ +┃ Benchmark ┃ Min ┃ Max ┃ Mean ┃ Min (+) ┃ Max (+) ┃ Mean (+) ┃ +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩ +│ 1 client vs. more clients (async) │ 1.640 │ 1.640 │ 1.640 │ 1.102 (1.5x) │ 1.102 (1.5x) │ 1.102 (1.5x) │ +│ 1 thread vs. 32 threads (sync) │ 5.526 │ 5.526 │ 5.526 │ 1.626 (3.4x) │ 1.626 (3.4x) │ 1.626 (3.4x) │ +│ 1 thread vs. 32 threads (sync) │ 4.639 │ 4.639 │ 4.639 │ 3.363 (1.4x) │ 3.363 (1.4x) │ 3.363 (1.4x) │ +│ sync vs. async (8) │ 3.198 │ 3.198 │ 3.198 │ 0.966 (3.3x) │ 0.966 (3.3x) │ 0.966 (3.3x) │ +└───────────────────────────────────┴─────────┴─────────┴─────────┴─────────────────┴─────────────────┴─────────────────┘ +``` + +Run a specific benchmark, e.g. [bench_sync.py](bench_sync.py) by specifying `--benchmark [name]`. + +``` +poetry run richbench . --repeat 1 --times 1 --benchmark sync +``` + +Outputs results from one benchmark. + +``` + Benchmarks, repeat=1, number=1 +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓ +┃ Benchmark ┃ Min ┃ Max ┃ Mean ┃ Min (+) ┃ Max (+) ┃ Mean (+) ┃ +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩ +│ 1 thread vs. 32 threads (sync) │ 6.804 │ 6.804 │ 6.804 │ 3.409 (2.0x) │ 3.409 (2.0x) │ 3.409 (2.0x) │ +└────────────────────────────────┴─────────┴─────────┴─────────┴─────────────────┴─────────────────┴─────────────────┘ +``` diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py new file mode 100644 index 00000000..d08ca634 --- /dev/null +++ b/benchmarks/bench_async.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python + +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
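+
+# Indexes the same small document repeatedly with one or more AsyncOpenSearch
+# clients, using asyncio.gather to keep requests in flight concurrently.
+# Each test_N entry point below issues the same total number of index
+# requests, so runs with different client counts are directly comparable;
+# richbench executes the (baseline, candidate) pairs listed in
+# __benchmarks__.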
+
+import asyncio
+import uuid
+
+from opensearchpy import AsyncHttpConnection, AsyncOpenSearch
+
+host = "localhost"
+port = 9200
+auth = ("admin", "admin")
+index_name = "test-index-async"
+item_count = 100
+
+
+async def index_records(client, item_count):
+    # Fire off all index requests for this client concurrently.
+    await asyncio.gather(
+        *[
+            client.index(
+                index=index_name,
+                body={
+                    "title": "Moneyball",
+                    "director": "Bennett Miller",
+                    "year": "2011",
+                },
+                id=uuid.uuid4(),
+            )
+            for _ in range(item_count)
+        ]
+    )
+
+
+async def test_async(client_count=1, item_count=1):
+    clients = []
+    for i in range(client_count):
+        clients.append(
+            AsyncOpenSearch(
+                hosts=[{"host": host, "port": port}],
+                http_auth=auth,
+                use_ssl=True,
+                verify_certs=False,
+                ssl_show_warn=False,
+                connection_class=AsyncHttpConnection,
+                pool_maxsize=client_count,
+            )
+        )
+
+    if await clients[0].indices.exists(index_name):
+        await clients[0].indices.delete(index_name)
+
+    await clients[0].indices.create(index_name)
+
+    await asyncio.gather(
+        *[index_records(clients[i], item_count) for i in range(client_count)]
+    )
+
+    await clients[0].indices.refresh(index=index_name)
+    print(await clients[0].count(index=index_name))
+
+    await clients[0].indices.delete(index_name)
+
+    await asyncio.gather(*[client.close() for client in clients])
+
+
+def test(client_count=1, item_count=1):
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
+    loop.run_until_complete(test_async(client_count, item_count))
+    loop.close()
+
+
+def test_1():
+    test(1, 32 * item_count)
+
+
+def test_2():
+    test(2, 16 * item_count)
+
+
+def test_4():
+    test(4, 8 * item_count)
+
+
+def test_8():
+    test(8, 4 * item_count)
+
+
+def test_16():
+    test(16, 2 * item_count)
+
+
+def test_32():
+    test(32, item_count)
+
+
+__benchmarks__ = [(test_1, test_8, "1 client vs. more clients (async)")]
diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py
new file mode 100644
index 00000000..03e6f998
--- /dev/null
+++ b/benchmarks/bench_info_sync.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
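+
+# Measures the client-side latency of repeated info() calls (GET /) while
+# spreading the same total number of requests across a growing number of
+# threads, with one client per thread and pool_maxsize set to the thread
+# count. Uncomment the logging lines below to trace urllib3 connection
+# reuse.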
+
+import logging
+import sys
+import time
+
+from thread_with_return_value import ThreadWithReturnValue
+
+from opensearchpy import OpenSearch
+
+host = "localhost"
+port = 9200
+auth = ("admin", "admin")
+request_count = 250
+
+
+root = logging.getLogger()
+# root.setLevel(logging.DEBUG)
+# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG)
+
+handler = logging.StreamHandler(sys.stdout)
+handler.setLevel(logging.DEBUG)
+formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+handler.setFormatter(formatter)
+root.addHandler(handler)
+
+
+def get_info(client, request_count):
+    # Returns the total client-side latency (in ms) of `request_count` info() calls.
+    tt = 0
+    for _ in range(request_count):
+        start = time.time() * 1000
+        client.info()
+        total_time = time.time() * 1000 - start
+        tt += total_time
+    return tt
+
+
+def test(thread_count=1, request_count=1, client_count=1):
+    clients = []
+    for i in range(client_count):
+        clients.append(
+            OpenSearch(
+                hosts=[{"host": host, "port": port}],
+                http_auth=auth,
+                use_ssl=True,
+                verify_certs=False,
+                ssl_show_warn=False,
+                pool_maxsize=thread_count,
+            )
+        )
+
+    threads = []
+    for thread_id in range(thread_count):
+        thread = ThreadWithReturnValue(
+            target=get_info, args=[clients[thread_id % len(clients)], request_count]
+        )
+        threads.append(thread)
+        thread.start()
+
+    latency = 0
+    for t in threads:
+        latency += t.join()
+
+    print(f"latency={latency}")
+
+
+def test_1():
+    test(1, 32 * request_count, 1)
+
+
+def test_2():
+    test(2, 16 * request_count, 2)
+
+
+def test_4():
+    test(4, 8 * request_count, 4)
+
+
+def test_8():
+    test(8, 4 * request_count, 8)
+
+
+def test_32():
+    test(32, request_count, 32)
+
+
+__benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")]
diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py
new file mode 100644
index 00000000..f20ca9f0
--- /dev/null
+++ b/benchmarks/bench_sync.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
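+
+# Bulk-indexes documents from a growing number of threads and reports the
+# accumulated client-side overhead: wall-clock time per bulk call minus the
+# server-reported `took` time. Each test_N variant keeps the total document
+# count constant while scaling the thread and client counts together.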
+
+import json
+import logging
+import sys
+import time
+import uuid
+
+from thread_with_return_value import ThreadWithReturnValue
+
+from opensearchpy import OpenSearch, Urllib3HttpConnection
+
+host = "localhost"
+port = 9200
+auth = ("admin", "admin")
+index_name = "test-index-sync"
+item_count = 1000
+
+root = logging.getLogger()
+# root.setLevel(logging.DEBUG)
+# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG)
+
+handler = logging.StreamHandler(sys.stdout)
+handler.setLevel(logging.DEBUG)
+formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+handler.setFormatter(formatter)
+root.addHandler(handler)
+
+
+def index_records(client, item_count):
+    # Bulk-index `item_count` documents ten times and return the accumulated
+    # client-side overhead (total round-trip time minus the server's `took`).
+    tt = 0
+    for _ in range(10):
+        data = []
+        for i in range(item_count):
+            data.append(
+                json.dumps({"index": {"_index": index_name, "_id": str(uuid.uuid4())}})
+            )
+            data.append(json.dumps({"value": i}))
+        data = "\n".join(data)
+
+        start = time.time() * 1000
+        rc = client.bulk(data)
+        if rc["errors"]:
+            raise Exception(rc["errors"])
+
+        server_time = rc["took"]
+        total_time = time.time() * 1000 - start
+
+        if total_time < server_time:
+            raise Exception(f"total={total_time} < server={server_time}")
+
+        tt += total_time - server_time
+    return tt
+
+
+def test(thread_count=1, item_count=1, client_count=1):
+    clients = []
+    for i in range(client_count):
+        clients.append(
+            OpenSearch(
+                hosts=[{"host": host, "port": port}],
+                http_auth=auth,
+                use_ssl=True,
+                verify_certs=False,
+                ssl_show_warn=False,
+                pool_maxsize=thread_count,
+                connection_class=Urllib3HttpConnection,
+            )
+        )
+
+    if clients[0].indices.exists(index_name):
+        clients[0].indices.delete(index_name)
+
+    clients[0].indices.create(
+        index=index_name,
+        body={
+            "mappings": {
+                "properties": {
+                    "value": {"type": "float"},
+                }
+            }
+        },
+    )
+
+    threads = []
+    for thread_id in range(thread_count):
+        thread = ThreadWithReturnValue(
+            target=index_records, args=[clients[thread_id % len(clients)], item_count]
+        )
+        threads.append(thread)
+        thread.start()
+
+    latency = 0
+    for t in threads:
+        latency += t.join()
+
+    clients[0].indices.refresh(index=index_name)
+    count = clients[0].count(index=index_name)
+
+    clients[0].indices.delete(index_name)
+
+    print(f"{count}, latency={latency}")
+
+
+def test_1():
+    test(1, 32 * item_count, 1)
+
+
+def test_2():
+    test(2, 16 * item_count, 2)
+
+
+def test_4():
+    test(4, 8 * item_count, 4)
+
+
+def test_8():
+    test(8, 4 * item_count, 8)
+
+
+def test_32():
+    test(32, item_count, 32)
+
+
+__benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")]
diff --git a/benchmarks/bench_sync_async.py b/benchmarks/bench_sync_async.py
new file mode 100644
index 00000000..5fa97f46
--- /dev/null
+++ b/benchmarks/bench_sync_async.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+
+# Pairs the 32-thread sync bulk benchmark against the 8-client async
+# benchmark so richbench reports them side by side.
+
+import bench_async
+import bench_sync
+
+__benchmarks__ = [(bench_sync.test_32, bench_async.test_8, "sync vs. async (8)")]
diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock
new file mode 100644
index 00000000..d4992d68
--- /dev/null
+++ b/benchmarks/poetry.lock
@@ -0,0 +1,847 @@
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+ +[[package]] +name = "aiohttp" +version = "3.8.6" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = 
"aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" 
+files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + +[[package]] +name = "asynctest" +version = "0.13.0" +description = "Enhance the standard unittest package with features for testing asyncio libraries" +optional = false +python-versions = ">=3.5" +files = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, +] + +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + 
{file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = 
"frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", 
"pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "markdown-it-py" +version = "2.2.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.7" +files = [ + {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, + {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" +typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + 
{file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "opensearch-py" +version = "2.3.2" +description = "Python client for OpenSearch" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +files = [] +develop = true + +[package.dependencies] +aiohttp = {version = ">=3,<4", optional = true, markers = "extra == \"async\""} +certifi = ">=2022.12.07" +python-dateutil = "*" +requests = ">=2.4.0,<3.0.0" +six = "*" +urllib3 = ">=1.26.9" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +kerberos = ["requests_kerberos"] + +[package.source] +type = "directory" +url = ".." 
+ +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyinstrument" +version = "4.6.0" +description = "Call stack profiler for Python. Shows you why your code is slow!" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:679b5397e3e6c0d6f56df50ba8c683543df4f1f7c1df2e2eb728e275bde2c85b"}, + {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:18479ffa0c922695ba2befab29521b62bfe75debef48d818cea46262cee48a1e"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daba103955d0d0b37b8bc20a4e8cc6477e839ce5984478fcf3f7cee8318e9636"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d93451e9c7650629b0bc12caa7390f81d1a15835c07f7dc170e953d4684ed1e7"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01009a7b58a6f11bf5560c23848ea2881acac974b0841fe5d365ef154baabd6f"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:288ea44da6333dacc77b4ba2149dba3dc1e9fbbebd3d5dc51a66c20839d80ef3"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecc106213146dd90659a1483047b3a1c2e174fb190c0e109234e524a4651e377"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cd8ab30c8dcd1511e9b3b98f601f17f2c5c9df1d28f8298d215c63d68919bdc"}, + {file = "pyinstrument-4.6.0-cp310-cp310-win32.whl", hash = "sha256:40e3656e6ace5a140880bd980a25f6a356c094c36e28ed1bf935d7349a78b1b6"}, + {file = "pyinstrument-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9623fc3fde47ae90ad5014737e37034b4abc3fbfb455b7b56cc095f9037d5af"}, + {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:beaaa3b647b3a4cbd34b71eacaa31e3eb90e1bf53e15ada3ac7e9df09d737239"}, + {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c69ab570609ac93b5f4ab2e5ccbf8add4f69a962b06307eea66ba65b5ad9d38"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5992748a74ec7ff445e4b56b5e316673c34b6cdbd3755111f7c023d8a141f001"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb1ba76c4e912cae159ab9729c7b31bb6d7fe8ed1f0fafce74484a4bb159c240"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:674868ebc3663b01d7d059a6f5cdeff6f18b49e217617720a5d645a6b55ead03"}, + {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:514a0ced357ff400988f599b0294d05e3b68468f9ab876f204bf12765f7fdb1b"}, + {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ccd1f5b4ad35c734dcf2d08d80b5b37205b4e84aa71fe76f95e43bd30c5eef9"}, + {file = 
"pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:611c6cd33f42f19e46d99eeef3b84a47d33fe34cdb0ce6e3635d2ee5038706a3"}, + {file = "pyinstrument-4.6.0-cp311-cp311-win32.whl", hash = "sha256:d20b5cf79bca1b3d425a7362457621741393b1d5ce2d920583541b947bc8a368"}, + {file = "pyinstrument-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ecd8cf03b04dc1b7f151896228993c6aa0fa897cdd517ea127465bc1c826c5b5"}, + {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3d4bed520c0f689a75bca4951f6b7fbad96851e8461086c98e03eb726f8a412a"}, + {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b74745f1d22133da8d4a38dd0c78c02c00154a5b7683bdd5df56a7c7705a979b"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ab698400e8401597e39c4816efa247f2b98c9b4e59e3ec25d534ae6887bd93"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de1a36a083b324dafe5e2880e5e04267a1983beb027f12c3dc361ddbe3acf9af"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8adc4f87d4289c1f04f19451b5133b8e307bd9b08c364c48e007ba663fefbf1b"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08fbc75d3615be6259b7af0c173c7bc48acb6e7bd758678d54eb411ba2903052"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d86fea6ce117bcff642e24208eb573c00d78b4c2934eb9bd5f915751980cc9bd"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23a3b21373e0c8bf0d00dda79989fcab0bb1d30094f7b210d40d2226fe20e141"}, + {file = "pyinstrument-4.6.0-cp312-cp312-win32.whl", hash = "sha256:a498c82d93621c5cf736e4660142ac0c3bbcb7b059bcbd4278a6364037128656"}, + {file = "pyinstrument-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:9116154446b9999f6524e9db29310aee6476a5a471c276928f2b46b6655a2dcc"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704c6d38abef8fca2e1085756c9574ea180f7ac866aab6943b483152c2828c2a"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbebdc11d4fc6f3123c046d84db88c7f605d53247e3f357314d0c5775d1beaf4"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7a7bae4cce5f8d084153857cedbce29ca8274c9924884d0461a5db48619c5d"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03289b10715e261a5c33b267d0a430d1b408f929922fde0a9fd311835c60351b"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7f83544ff9abfacdf64b39498ca3dcd454956e44aedb5f67626b7212291c9160"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:40640f02fe7865540e8a1e51bf7f9d2403e3364c3b7edfdb9dae5eb5596811da"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f3719464888d7303e1081996bc56ab75ef5cdf7ef69ccbb7b29f48eb37d8f8b9"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:46e16de6bd3b74ef01b6457d862fee751515315edb5e9283205e45299a29ac49"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9ded87ae11cb0a95a767c817908833ec0821fe0e81650968b201a031edf4bc15"}, + {file = 
"pyinstrument-4.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8bf16e459a868d9dbaacff4f0a0acd6ad78ce36f2aceabf21e9fd0c3b6aca0d4"}, + {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb83e445795431c3d867b298c0583ee27717bbc50e5120a4c98575c979ab3ab8"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29072b1be183e173d7b0f12caf29f8717d273afbf34df950f5fa0d98127cd3fb"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09502af2a383c59e5a0d3bebfab7e5845f79122348358e9e52b2b0187db84a44"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23c982eb9c4d2f8fe553dacb9bdc0991170a0998b94c84f75c2a052e8af4c74"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f7a38ef482f2151393e729c5582191e4ab05f0ed1fa56b16c2377ff3129107af"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e983e16c2fdfb752387133380859c3414e119e41c14f39f5f869f29dcf6e995c"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d00c87e5cea48a562d67f0436999463b7989cff2e4c196b0e8ba06d515f191a9"}, + {file = "pyinstrument-4.6.0-cp38-cp38-win32.whl", hash = "sha256:a24c95cabf2ca5d79b62dbc8ff17749768b8aafd777841352f59f4ffd6688782"}, + {file = "pyinstrument-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3d88b66dbbcdc6e4c57bd8574ad9d096cd23285eee0f4a5cf74f0e0df6aa190"}, + {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2bcfec45cdbb9edf6d5853debac4a792de589e621be07a71dc76acb36e144a3a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e790515a22844bbccaa388c7715b037c45a8d0155c4a6f2990659998a8920501"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93a30e0d93633a28d4adcf7d7e2d158d6331809b95c2c4a155da17ea1e43eaa3"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa554eb8ef1c54849dbf480965b073f39b39b517e466ce241808a00398f9742a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e289898c644cbbb61d931bbcb6505e2a279ad1122612c9098bfb0958ebf5764"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20ce0f1612a019888a6b94fa7f1e7862842f0b5219282e3354d5b35aceb363f6"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4935f3cdb9062fceac65c50de76f07e05cf630bd3a9c663fedc9e88b5efe7d7c"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dc9c4577ef4b06ae1592c920d0a4f0f0db587a16f530c629ad93e125bc79ebb7"}, + {file = "pyinstrument-4.6.0-cp39-cp39-win32.whl", hash = "sha256:3ec6b04d8cfb34aec48de7fa77aeb919e8e7e19909740ab7a5553339f6f4c53a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a6d2e5c15f989629fac41536ec2ca1fe81359fadf4dadf2ff24fe96b389f6df"}, + {file = "pyinstrument-4.6.0.tar.gz", hash = "sha256:3e509e879c853dbc5fdc1757f0cfdbf8bee899c80f53d504a7df28898f0fa8ed"}, +] + +[package.extras] +bin = ["click", "nox"] +docs = ["furo (==2021.6.18b36)", "myst-parser (==0.15.1)", "sphinx (==4.2.0)", "sphinxcontrib-programoutput (==0.17)"] +examples = ["django", "numpy"] +test = ["flaky", 
"greenlet (>=3.0.0a1)", "ipython", "pytest", "pytest-asyncio (==0.12.0)", "sphinx-autobuild (==2021.3.14)", "trio"] +types = ["typing-extensions"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "richbench" +version = "1.0.3" +description = "Richbench, a little benchmarking tool" +optional = false +python-versions = ">=3.6" +files = [ + {file = "richbench-1.0.3-py3-none-any.whl", hash = "sha256:f52651cc0e0069a1355c5ed8cda214cb3f8961e7aaa431e440071d30f62e3e55"}, + {file = "richbench-1.0.3.tar.gz", hash = "sha256:744afa3e78cbd919721042c11f7b7f9d2f546cebb3333d40290c4a0d88791701"}, +] + +[package.dependencies] +pyinstrument = "*" +rich = "*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "urllib3" +version = "2.0.6" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = 
"sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = 
"yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.7" +content-hash = "84953079e0bc825b495f10721d514529becb4fc8eef2b9772562f63b0bd75ef3" diff --git a/benchmarks/poetry.toml b/benchmarks/poetry.toml new file mode 100644 index 00000000..eadfd54b --- /dev/null +++ b/benchmarks/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +create = true \ No newline at end of file diff --git a/benchmarks/pyproject.toml b/benchmarks/pyproject.toml new file mode 100644 index 00000000..c0c82142 --- /dev/null +++ b/benchmarks/pyproject.toml @@ -0,0 +1,16 @@ +[tool.poetry] +name = "package" +version = "0.1.0" +description = "OpenSearch Python client benchmarks." 
+authors = ["Daniel Doubrovkine "]
+license = "Apache 2.0"
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.7"
+opensearch-py = { path = "..", develop=true, extras=["async"] }
+richbench = "*"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/benchmarks/thread_with_return_value.py b/benchmarks/thread_with_return_value.py
new file mode 100644
index 00000000..fb495656
--- /dev/null
+++ b/benchmarks/thread_with_return_value.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+
+from threading import Thread
+
+
+class ThreadWithReturnValue(Thread):
+    # A Thread subclass that captures the target's return value so that
+    # callers can retrieve it from join().
+    def __init__(
+        self, group=None, target=None, name=None, args=(), kwargs=None, Verbose=None
+    ):
+        # Default kwargs to None rather than a mutable {} so instances never
+        # share state through the default argument.
+        Thread.__init__(self, group, target, name, args, kwargs if kwargs is not None else {})
+        self._return = None
+
+    def run(self):
+        if self._target is not None:
+            self._return = self._target(*self._args, **self._kwargs)
+
+    def join(self, *args):
+        Thread.join(self, *args)
+        return self._return
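+
+
+# A minimal usage sketch (illustrative only, not part of the benchmark runs):
+# run a target callable in a thread and collect its return value from join().
+if __name__ == "__main__":
+    t = ThreadWithReturnValue(target=lambda x: x * 2, args=(21,))
+    t.start()
+    print(t.join())  # prints 42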
From 9d7cd4352fc22d352bbaafdf6d6f78f1ab3822ba Mon Sep 17 00:00:00 2001
From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com>
Date: Mon, 16 Oct 2023 08:27:22 -0700
Subject: [PATCH 28/80] Added advanced index actions guide & sample code file.
 (#541)

Signed-off-by: Djcarrillo6
Signed-off-by: roma2023
---
 CHANGELOG.md                         |   4 +-
 USER_GUIDE.md                        |   1 +
 guides/advanced_index_actions.md     | 113 ++++++++++++++++++
 .../advanced_index_actions_sample.py |  82 +++++++++++++
 4 files changed, 198 insertions(+), 2 deletions(-)
 create mode 100644 guides/advanced_index_actions.md
 create mode 100644 samples/advanced_index_actions/advanced_index_actions_sample.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 768bdca9..768ffeee 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -54,7 +54,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 - Added support for the security plugin ([#399](https://github.com/opensearch-project/opensearch-py/pull/399))
 - Supports OpenSearch 2.1.0 - 2.6.0 ([#381](https://github.com/opensearch-project/opensearch-py/pull/381))
 - Added `allow_redirects` to `RequestsHttpConnection#perform_request` ([#401](https://github.com/opensearch-project/opensearch-py/pull/401))
-- Enhanced YAML test runner to use OpenSearch `rest-api-spec` YAML tests ([#414](https://github.com/opensearch-project/opensearch-py/pull/414)
+- Enhanced YAML test runner to use OpenSearch `rest-api-spec` YAML tests ([#414](https://github.com/opensearch-project/opensearch-py/pull/414))
 - Added `Search#collapse` ([#409](https://github.com/opensearch-project/opensearch-py/issues/409))
 - Added support for the ISM API ([#398](https://github.com/opensearch-project/opensearch-py/pull/398))
 - Added `trust_env` to `AIOHttpConnection` ([#398](https://github.com/opensearch-project/opensearch-py/pull/438))
@@ -152,4 +152,4 @@
 [2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0
 [2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0
 [2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1
-[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2
+[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2
\ No newline at end of file
diff --git a/USER_GUIDE.md b/USER_GUIDE.md
index 28005e05..ff26110b 100644
--- a/USER_GUIDE.md
+++ b/USER_GUIDE.md
@@ -155,6 +155,7 @@ print(response)
 - [Using a Proxy](guides/proxy.md)
 - [Working with Snapshots](guides/snapshot.md)
 - [Index Templates](guides/index_template.md)
+- [Advanced Index Actions](guides/advanced_index_actions.md)
 - [Connection Classes](guides/connection_classes.md)

 ## Plugins

diff --git a/guides/advanced_index_actions.md b/guides/advanced_index_actions.md
new file mode 100644
index 00000000..a3a0620e
--- /dev/null
+++ b/guides/advanced_index_actions.md
@@ -0,0 +1,113 @@
+# Advanced Index Actions Guide
+- [Advanced Index Actions](#advanced-index-actions)
+  - [Setup](#setup)
+  - [API Actions](#api-actions)
+    - [Clear Index Cache](#clear-index-cache)
+    - [Flush Index](#flush-index)
+    - [Refresh Index](#refresh-index)
+    - [Open or Close Index](#open-or-close-index)
+    - [Force Merge Index](#force-merge-index)
+    - [Clone Index](#clone-index)
+    - [Split Index](#split-index)
+  - [Cleanup](#cleanup)
+
+
+# Advanced Index Actions
+In this guide, we will look at some advanced index actions that are not covered in the [Index Lifecycle](index_lifecycle.md) guide.
+
+## Setup
+Let's create a client instance and an index named `movies`:
+
+```python
+from opensearchpy import OpenSearch
+client = OpenSearch(
+    hosts=['https://localhost:9200'],
+    use_ssl=True,
+    verify_certs=False,
+    http_auth=('admin', 'admin')
+)
+client.indices.create(index='movies')
+```
+
+## API Actions
+### Clear index cache
+You can clear the cache of an index or indices by using the `indices.clear_cache` API action. The following example clears the cache of the `movies` index:
+
+```python
+client.indices.clear_cache(index='movies')
+```
+
+By default, the `indices.clear_cache` API action clears all types of cache. To clear specific types of cache, pass the `query`, `fielddata`, or `request` parameter to the API action:
+
+```python
+client.indices.clear_cache(index='movies', query=True)
+client.indices.clear_cache(index='movies', fielddata=True, request=True)
+```
+
+### Flush index
+Sometimes you might want to flush an index or indices to make sure that all data in the transaction log is persisted to the index. To flush an index or indices, use the `indices.flush` API action. The following example flushes the `movies` index:
+
+```python
+client.indices.flush(index='movies')
+```
+
+### Refresh index
+You can refresh an index or indices to make sure that all changes are available for search. To refresh an index or indices, use the `indices.refresh` API action:
+
+```python
+client.indices.refresh(index='movies')
+```
+
+### Open or close index
+You can close an index to prevent read and write operations on it. A closed index does not have to maintain certain data structures that an open index requires, which reduces the memory and disk space used by the index. The following example closes and reopens the `movies` index:
+
+```python
+client.indices.close(index='movies')
+client.indices.open(index='movies')
+```
+
+### Force merge index
+You can force merge an index or indices to reduce the number of segments in the index. This can be useful if you have a large number of small segments: merging them reduces the memory footprint of the index. Note that a force merge is resource-intensive, and it is only recommended for read-only indices. The following example force merges the `movies` index:
+
+```python
+client.indices.forcemerge(index='movies')
+```
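+
+The force merge API also accepts a `max_num_segments` parameter that controls how far the merge goes. As a minimal sketch, assuming the index has been made read-only as recommended above, you can merge each shard down to a single segment:
+
+```python
+# Merge every shard of the index down to one segment.
+client.indices.forcemerge(index='movies', max_num_segments=1)
+```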
+
+### Clone index
+You can clone an index to create a new index with the same mappings, data, and most of the settings. The source index must be in a read-only state for cloning. The following example blocks write operations to the `movies` index, clones it to create a new index named `movies_clone`, and then re-enables writes:
+
+```python
+client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': True}}})
+client.indices.clone(index='movies', target='movies_clone')
+client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': False}}})
+```
+
+### Split index
+You can split an index into a new index with more primary shards; the target shard count must be a multiple of the source shard count, and the source index must be in a read-only state. The following example creates the read-only `books` index with 30 routing shards and 5 primary shards (a factor of 30), splits it into `bigger_books` with 10 primary shards (also a factor of 30), and then re-enables writes:
+
+```python
+client.indices.create(
+    index='books',
+    body={ 'settings': {
+        'index': { 'number_of_shards': 5,
+                   'number_of_routing_shards': 30,
+                   'blocks': { 'write': True } } } })
+
+client.indices.split(
+    index='books',
+    target='bigger_books',
+    body={ 'settings': { 'index': { 'number_of_shards': 10 } } })
+
+client.indices.put_settings(index='books', body={ 'index': { 'blocks': { 'write': False } } })
+```
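+
+One way to sanity-check the result is to read the target index's settings back. This is a minimal illustrative sketch; note that the response is keyed by index name and that shard counts come back as strings:
+
+```python
+# Confirm that the split index ended up with the expected primary shard count.
+settings = client.indices.get_settings(index='bigger_books')
+print(settings['bigger_books']['settings']['index']['number_of_shards'])  # '10'
+```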
+
+## Cleanup
+
+Let's delete all the indices we created in this guide:
+
+```python
+client.indices.delete(index=['movies', 'books', 'movies_clone', 'bigger_books'])
+```
+
+# Sample Code
+See [advanced_index_actions_sample.py](/samples/advanced_index_actions/advanced_index_actions_sample.py) for a working sample of the concepts in this guide.
\ No newline at end of file
diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py
new file mode 100644
index 00000000..391d36b9
--- /dev/null
+++ b/samples/advanced_index_actions/advanced_index_actions_sample.py
@@ -0,0 +1,82 @@
+from opensearchpy import OpenSearch
+import time
+
+
+# For cleaner output, uncomment the two lines below to disable warnings and informational messages
+# import urllib3
+# urllib3.disable_warnings()
+
+
+def test_opensearch_examples():
+    # Set up
+    client = OpenSearch(
+        hosts=['https://localhost:9200'],
+        use_ssl=True,
+        verify_certs=False,
+        http_auth=('admin', 'admin')
+    )
+    client.indices.create(index='movies')
+    print("'movies' index created!")
+
+    # Test Clear Index Cache
+    client.indices.clear_cache(index='movies')
+    print("Cache for 'movies' index cleared!")
+    client.indices.clear_cache(index='movies', query=True)
+    print("Query cache for 'movies' index cleared!")
+    client.indices.clear_cache(index='movies', fielddata=True, request=True)
+    print("Field data and request cache for 'movies' index cleared!")
+
+    # Test Flush Index
+    client.indices.flush(index='movies')
+    print("'movies' index flushed!")
+
+    # Test Refresh Index
+    client.indices.refresh(index='movies')
+    print("'movies' index refreshed!")
+
+    # Test Close or Open Index
+    client.indices.close(index='movies')
+    print("'movies' index closed!")
+    time.sleep(2)  # add sleep to ensure the index has time to close
+    client.indices.open(index='movies')
+    print("'movies' index opened!")
+
+    # Test Force Merge Index
+    client.indices.forcemerge(index='movies')
+    print("'movies' index force merged!")
+
+    # Test Clone
+    client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': True}}})
+    print("Write operations blocked for 'movies' index!")
+    time.sleep(2)
+    client.indices.clone(index='movies', target='movies_clone')
+    print("'movies' index cloned to 'movies_clone'!")
+    client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': False}}})
+    print("Write operations enabled for 'movies' index!")
+
+    # Test Split
+    client.indices.create(
+        index='books',
+        body={'settings': {
+            'index': {'number_of_shards': 5, 'number_of_routing_shards': 30, 'blocks': {'write': True}}}}
+    )
+    print("'books' index created!")
+    time.sleep(2)  # add sleep to ensure the index has time to become read-only
+    client.indices.split(
+        index='books',
+        target='bigger_books',
+        body={'settings': {'index': {'number_of_shards': 10}}}
+    )
+    print("'books' index split into 'bigger_books'!")
+    client.indices.put_settings(index='books', body={'index': {'blocks': {'write': False}}})
+    print("Write operations enabled for 'books' index!")
+
+    # Cleanup
+    client.indices.delete(index=['movies', 'books', 'movies_clone', 'bigger_books'])
+    print("All indices deleted!")
+
+
+
+
+if __name__ == "__main__":
+    test_opensearch_examples()
\ No newline at end of file
(#542) Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 3 +- USER_GUIDE.md | 1 + guides/json.md | 66 +++++++++++++++++++++++++++ samples/json/hello-async.py | 90 +++++++++++++++++++++++++++++++++++++ samples/json/hello.py | 76 +++++++++++++++++++++++++++++++ 5 files changed, 235 insertions(+), 1 deletion(-) create mode 100644 guides/json.md create mode 100755 samples/json/hello-async.py create mode 100755 samples/json/hello.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 768ffeee..1cceae9e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,9 +5,10 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Added - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) -- Added new guide for using index templates with the client ([#531](https://github.com/opensearch-project/opensearch-py/pull/531)) +- Added guide on using index templates ([#531](https://github.com/opensearch-project/opensearch-py/pull/531)) - Added `pool_maxsize` for `Urllib3HttpConnection` ([#535](https://github.com/opensearch-project/opensearch-py/pull/535)) - Added benchmarks ([#537](https://github.com/opensearch-project/opensearch-py/pull/537)) +- Added guide on making raw JSON REST requests ([#542](https://github.com/opensearch-project/opensearch-py/pull/542)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/USER_GUIDE.md b/USER_GUIDE.md index ff26110b..90387b43 100644 --- a/USER_GUIDE.md +++ b/USER_GUIDE.md @@ -156,6 +156,7 @@ print(response) - [Working with Snapshots](guides/snapshot.md) - [Index Templates](guides/index_template.md) - [Advanced Index Actions](guides/advanced_index_actions.md) +- [Making Raw JSON REST Requests](guides/json.md) - [Connection Classes](guides/connection_classes.md) ## Plugins diff --git a/guides/json.md b/guides/json.md new file mode 100644 index 00000000..edefa209 --- /dev/null +++ b/guides/json.md @@ -0,0 +1,66 @@ +- [Making Raw JSON REST Requests](#making-raw-json-rest-requests) + - [GET](#get) + - [PUT](#put) + - [POST](#post) + - [DELETE](#delete) + +# Making Raw JSON REST Requests + +The OpenSearch client implements many high-level REST DSLs that invoke OpenSearch APIs. However you may find yourself in a situation that requires you to invoke an API that is not supported by the client. Use `client.transport.perform_request` to do so. See [samples/json](../samples/json) for a complete working sample. + +## GET + +The following example returns the server version information via `GET /`. + +```python +info = client.transport.perform_request('GET', '/') +print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") +``` + +Note that the client will parse the response as JSON when appropriate. + +## PUT + +The following example creates an index. + +```python +index_body = { + 'settings': { + 'index': { + 'number_of_shards': 4 + } + } +} + +client.transport.perform_request("PUT", "/movies", body=index_body) +``` + +Note that the client will raise errors automatically. 
For example, if the index already exists, an `opensearchpy.exceptions.RequestError: RequestError(400, 'resource_already_exists_exception',` will be thrown. + +## POST + +The following example searches for a document. + +```python +q = 'miller' + +query = { + 'size': 5, + 'query': { + 'multi_match': { + 'query': q, + 'fields': ['title^2', 'director'] + } + } +} + +client.transport.perform_request("POST", "/movies/_search", body = query) +``` + +## DELETE + +The following example deletes an index. + +```python +client.transport.perform_request("DELETE", "/movies") +``` diff --git a/samples/json/hello-async.py b/samples/json/hello-async.py new file mode 100755 index 00000000..aa4840c4 --- /dev/null +++ b/samples/json/hello-async.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python + +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +import asyncio + +from opensearchpy import AsyncOpenSearch + +async def main(): + # connect to OpenSearch + host = 'localhost' + port = 9200 + auth = ('admin', 'admin') # For testing only. Don't store credentials in code. + + client = AsyncOpenSearch( + hosts = [{'host': host, 'port': port}], + http_auth = auth, + use_ssl = True, + verify_certs = False, + ssl_show_warn = False + ) + + try: + info = await client.transport.perform_request('GET', '/') + print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + + # create an index + + index_name = 'movies' + + index_body = { + 'settings': { + 'index': { + 'number_of_shards': 4 + } + } + } + + print(await client.transport.perform_request("PUT", f"/{index_name}", body=index_body)) + + # add a document to the index + + document = { + 'title': 'Moneyball', + 'director': 'Bennett Miller', + 'year': '2011' + } + + id = '1' + + print(await client.transport.perform_request("PUT", f"/{index_name}/_doc/{id}?refresh=true", body = document)) + + # search for a document + + q = 'miller' + + query = { + 'size': 5, + 'query': { + 'multi_match': { + 'query': q, + 'fields': ['title^2', 'director'] + } + } + } + + print(await client.transport.perform_request("POST", f"/{index_name}/_search", body = query)) + + # delete the document + + print(await client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}")) + + # delete the index + + print(await client.transport.perform_request("DELETE", f"/{index_name}")) + + + finally: + await client.close() + +if __name__ == "__main__": + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + loop.run_until_complete(main()) + loop.close() + diff --git a/samples/json/hello.py b/samples/json/hello.py new file mode 100755 index 00000000..d5b8e70f --- /dev/null +++ b/samples/json/hello.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python + +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +from opensearchpy import OpenSearch + +# connect to OpenSearch + +host = 'localhost' +port = 9200 +auth = ('admin', 'admin') # For testing only. Don't store credentials in code. 
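+
+# Note: verify_certs is disabled below because this sample targets a local
+# development cluster; enable certificate verification (verify_certs = True)
+# when connecting to a real deployment.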
+ +client = OpenSearch( + hosts = [{'host': host, 'port': port}], + http_auth = auth, + use_ssl = True, + verify_certs = False, + ssl_show_warn = False +) + +info = client.transport.perform_request('GET', '/') +print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + +# create an index + +index_name = 'movies' + +index_body = { + 'settings': { + 'index': { + 'number_of_shards': 4 + } + } +} + +print(client.transport.perform_request("PUT", f"/{index_name}", body=index_body)) + +# add a document to the index + +document = { + 'title': 'Moneyball', + 'director': 'Bennett Miller', + 'year': '2011' +} + +id = '1' + +print(client.transport.perform_request("PUT", f"/{index_name}/_doc/{id}?refresh=true", body = document)) + +# search for a document + +q = 'miller' + +query = { + 'size': 5, + 'query': { + 'multi_match': { + 'query': q, + 'fields': ['title^2', 'director'] + } + } +} + +print(client.transport.perform_request("POST", f"/{index_name}/_search", body = query)) + +# delete the document + +print(client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}")) + +# delete the index + +print(client.transport.perform_request("DELETE", f"/{index_name}")) From 93b46981208d78aba4fda52e0bd034d9f611834e Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Mon, 23 Oct 2023 19:46:19 -0400 Subject: [PATCH 30/80] Added support for AWS Sigv4 for UrlLib3. (#547) * WIP: Added support for AWS Sigv4 for UrlLib3. Signed-off-by: dblock * Refactored common implementation. Signed-off-by: dblock * Added sigv4 samples. Signed-off-by: dblock * Updated CHANGELOG. Signed-off-by: dblock * Add documentation. Signed-off-by: dblock * Use the correct class in tests. Signed-off-by: dblock * Renamed samples. Signed-off-by: dblock * Split up requests and urllib3 unit tests. Signed-off-by: dblock * Rename AWSV4Signer. Signed-off-by: dblock * Clarified documentation of when to use Urllib3AWSV4SignerAuth vs. RequestHttpConnection. Signed-off-by: dblock * Move fetch_url inside the signer class. Signed-off-by: dblock * Added unit test for Urllib3AWSV4SignerAuth adding headers. Signed-off-by: dblock * Added unit test for signing to include query string. 
Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + DEVELOPER_GUIDE.md | 16 +- guides/auth.md | 13 +- opensearchpy/__init__.py | 9 +- opensearchpy/connection/http_urllib3.py | 21 +- opensearchpy/helpers/__init__.py | 4 +- opensearchpy/helpers/__init__.pyi | 1 + opensearchpy/helpers/signer.py | 125 +- samples/aws/README.md | 22 + samples/aws/search-requests.py | 69 + samples/aws/search-urllib3.py | 69 + samples/poetry.lock | 688 +++------- samples/pyproject.toml | 3 +- .../test_async/test_connection.py | 8 +- test_opensearchpy/test_connection.py | 1171 ----------------- test_opensearchpy/test_connection/__init__.py | 25 + .../test_connection/test_base_connection.py | 231 ++++ .../test_requests_http_connection.py | 561 ++++++++ .../test_urllib3_http_connection.py | 406 ++++++ 19 files changed, 1677 insertions(+), 1766 deletions(-) create mode 100644 samples/aws/README.md create mode 100644 samples/aws/search-requests.py create mode 100644 samples/aws/search-urllib3.py delete mode 100644 test_opensearchpy/test_connection.py create mode 100644 test_opensearchpy/test_connection/__init__.py create mode 100644 test_opensearchpy/test_connection/test_base_connection.py create mode 100644 test_opensearchpy/test_connection/test_requests_http_connection.py create mode 100644 test_opensearchpy/test_connection/test_urllib3_http_connection.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 1cceae9e..843abeac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added `pool_maxsize` for `Urllib3HttpConnection` ([#535](https://github.com/opensearch-project/opensearch-py/pull/535)) - Added benchmarks ([#537](https://github.com/opensearch-project/opensearch-py/pull/537)) - Added guide on making raw JSON REST requests ([#542](https://github.com/opensearch-project/opensearch-py/pull/542)) +- Added support for AWS SigV4 for urllib3 ([#547](https://github.com/opensearch-project/opensearch-py/pull/547)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index ec39602f..5fe9cad1 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -45,7 +45,19 @@ docker run -d -p 9200:9200 -p 9600:9600 -e "discovery.type=single-node" opensear Tests require a live instance of OpenSearch running in docker. -This will start a new instance and run tests against the latest version of OpenSearch. +If you have one running. + +``` +python setup.py test +``` + +To run tests in a specific test file. + +``` +python setup.py test -s test_opensearchpy/test_connection.py +``` + +If you want to auto-start one, the following will start a new instance and run tests against the latest version of OpenSearch. ``` ./.ci/run-tests @@ -76,7 +88,7 @@ You can also run individual tests matching a pattern (`pytest -k [pattern]`). 
``` ./.ci/run-tests true 1.3.0 test_no_http_compression -test_opensearchpy/test_connection.py::TestUrllib3Connection::test_no_http_compression PASSED [ 33%] +test_opensearchpy/test_connection.py::TestUrllib3HttpConnection::test_no_http_compression PASSED [ 33%] test_opensearchpy/test_connection.py::TestRequestsConnection::test_no_http_compression PASSED [ 66%] test_opensearchpy/test_async/test_connection.py::TestAIOHttpConnection::test_no_http_compression PASSED [100%] ``` diff --git a/guides/auth.md b/guides/auth.md index 3e7f4092..a07d3996 100644 --- a/guides/auth.md +++ b/guides/auth.md @@ -1,5 +1,6 @@ - [Authentication](#authentication) - [IAM Authentication](#iam-authentication) + - [IAM Authentication with a Synchronous Client](#iam-authentication-with-a-synchronous-client) - [IAM Authentication with an Async Client](#iam-authentication-with-an-async-client) - [Kerberos](#kerberos) @@ -9,24 +10,28 @@ OpenSearch allows you to use different methods for the authentication via `conne ## IAM Authentication -Opensearch-py supports IAM-based authentication via `AWSV4SignerAuth`, which uses `RequestHttpConnection` as the transport class for communicating with OpenSearch clusters running in Amazon Managed OpenSearch and OpenSearch Serverless, and works in conjunction with [botocore](https://pypi.org/project/botocore/). +This library supports IAM-based authentication when communicating with OpenSearch clusters running in Amazon Managed OpenSearch and OpenSearch Serverless. + +## IAM Authentication with a Synchronous Client + +For `Urllib3HttpConnection` use `Urllib3AWSV4SignerAuth`, and for `RequestHttpConnection` use `RequestsAWSV4SignerAuth`. ```python -from opensearchpy import OpenSearch, RequestsHttpConnection, AWSV4SignerAuth +from opensearchpy import OpenSearch, Urllib3HttpConnection, Urllib3AWSV4SignerAuth import boto3 host = '' # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com region = 'us-west-2' service = 'es' # 'aoss' for OpenSearch Serverless credentials = boto3.Session().get_credentials() -auth = AWSV4SignerAuth(credentials, region, service) +auth = Urllib3AWSV4SignerAuth(credentials, region, service) client = OpenSearch( hosts = [{'host': host, 'port': 443}], http_auth = auth, use_ssl = True, verify_certs = True, - connection_class = RequestsHttpConnection, + connection_class = Urllib3HttpConnection, pool_maxsize = 20 ) diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index 6669f179..6f26bc53 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -71,7 +71,12 @@ UnknownDslObject, ValidationException, ) -from .helpers import AWSV4SignerAsyncAuth, AWSV4SignerAuth +from .helpers import ( + AWSV4SignerAsyncAuth, + AWSV4SignerAuth, + RequestsAWSV4SignerAuth, + Urllib3AWSV4SignerAuth, +) from .helpers.aggs import A from .helpers.analysis import analyzer, char_filter, normalizer, token_filter, tokenizer from .helpers.document import Document, InnerDoc, MetaField @@ -166,6 +171,8 @@ "OpenSearchWarning", "OpenSearchDeprecationWarning", "AWSV4SignerAuth", + "Urllib3AWSV4SignerAuth", + "RequestsAWSV4SignerAuth", "AWSV4SignerAsyncAuth", "A", "AttrDict", diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py index 4bc27bbb..0042cc3c 100644 --- a/opensearchpy/connection/http_urllib3.py +++ b/opensearchpy/connection/http_urllib3.py @@ -27,6 +27,7 @@ import ssl import time import warnings +from typing import Callable import urllib3 # type: ignore from urllib3.exceptions import ReadTimeoutError @@ 
-128,10 +129,17 @@ def __init__( opaque_id=opaque_id, **kwargs ) - if http_auth is not None: - if isinstance(http_auth, (tuple, list)): - http_auth = ":".join(http_auth) - self.headers.update(urllib3.make_headers(basic_auth=http_auth)) + + self.http_auth = http_auth + if self.http_auth is not None: + if isinstance(self.http_auth, Callable): + pass + elif isinstance(self.http_auth, (tuple, list)): + self.headers.update( + urllib3.make_headers(basic_auth=":".join(http_auth)) + ) + else: + self.headers.update(urllib3.make_headers(basic_auth=http_auth)) pool_class = urllib3.HTTPConnectionPool kw = {} @@ -218,6 +226,7 @@ def perform_request( url = "%s?%s" % (url, urlencode(params)) full_url = self.host + url + start = time.time() orig_body = body try: @@ -240,6 +249,10 @@ def perform_request( body = self._gzip_compress(body) request_headers["content-encoding"] = "gzip" + if self.http_auth is not None: + if isinstance(self.http_auth, Callable): + request_headers.update(self.http_auth(method, full_url, body)) + response = self.pool.urlopen( method, url, body, retries=Retry(False), headers=request_headers, **kw ) diff --git a/opensearchpy/helpers/__init__.py b/opensearchpy/helpers/__init__.py index 72a7d140..80dbf8bf 100644 --- a/opensearchpy/helpers/__init__.py +++ b/opensearchpy/helpers/__init__.py @@ -39,7 +39,7 @@ ) from .asyncsigner import AWSV4SignerAsyncAuth from .errors import BulkIndexError, ScanError -from .signer import AWSV4SignerAuth +from .signer import AWSV4SignerAuth, RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth __all__ = [ "BulkIndexError", @@ -54,6 +54,8 @@ "_process_bulk_chunk", "AWSV4SignerAuth", "AWSV4SignerAsyncAuth", + "RequestsAWSV4SignerAuth", + "Urllib3AWSV4SignerAuth", ] diff --git a/opensearchpy/helpers/__init__.pyi b/opensearchpy/helpers/__init__.pyi index 59b5cefd..a4711989 100644 --- a/opensearchpy/helpers/__init__.pyi +++ b/opensearchpy/helpers/__init__.pyi @@ -48,5 +48,6 @@ try: from .._async.helpers.actions import async_streaming_bulk as async_streaming_bulk from .asyncsigner import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth from .signer import AWSV4SignerAuth as AWSV4SignerAuth + from .signer import RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth except (ImportError, SyntaxError): pass diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py index 176f6ac9..436909e7 100644 --- a/opensearchpy/helpers/signer.py +++ b/opensearchpy/helpers/signer.py @@ -8,6 +8,7 @@ # GitHub history for details. import sys +from typing import Any, Callable, Dict import requests @@ -17,38 +18,12 @@ from urllib.parse import parse_qs, urlencode, urlparse -def fetch_url(prepared_request): # type: ignore +class AWSV4Signer: """ - This is a util method that helps in reconstructing the request url. - :param prepared_request: unsigned request - :return: reconstructed url + Generic AWS V4 Request Signer. """ - url = urlparse(prepared_request.url) - path = url.path or "/" - - # fetch the query string if present in the request - querystring = "" - if url.query: - querystring = "?" + urlencode( - parse_qs(url.query, keep_blank_values=True), doseq=True - ) - - # fetch the host information from headers - headers = dict( - (key.lower(), value) for key, value in prepared_request.headers.items() - ) - location = headers.get("host") or url.netloc - - # construct the url and return - return url.scheme + "://" + location + path + querystring - -class AWSV4SignerAuth(requests.auth.AuthBase): - """ - AWS V4 Request Signer for Requests. 
- """ - - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region: str, service: str = "es") -> Any: # type: ignore if not credentials: raise ValueError("Credentials cannot be empty") self.credentials = credentials @@ -61,27 +36,20 @@ def __init__(self, credentials, region, service="es"): # type: ignore raise ValueError("Service name cannot be empty") self.service = service - def __call__(self, request): # type: ignore - return self._sign_request(request) # type: ignore - - def _sign_request(self, prepared_request): # type: ignore + def sign(self, method: str, url: str, body: Any) -> Dict[str, str]: """ - This method helps in signing the request by injecting the required headers. - :param prepared_request: unsigned request - :return: signed request + This method signs the request and returns headers. + :param method: HTTP method + :param url: url + :param body: body + :return: headers """ from botocore.auth import SigV4Auth from botocore.awsrequest import AWSRequest - url = fetch_url(prepared_request) # type: ignore - # create an AWS request object and sign it using SigV4Auth - aws_request = AWSRequest( - method=prepared_request.method.upper(), - url=url, - data=prepared_request.body, - ) + aws_request = AWSRequest(method=method.upper(), url=url, data=body) # credentials objects expose access_key, secret_key and token attributes # via @property annotations that call _refresh() on every access, @@ -101,9 +69,74 @@ def _sign_request(self, prepared_request): # type: ignore sig_v4_auth.add_auth(aws_request) # copy the headers from AWS request object into the prepared_request - prepared_request.headers.update(dict(aws_request.headers.items())) - prepared_request.headers["X-Amz-Content-SHA256"] = sig_v4_auth.payload( - aws_request + headers = dict(aws_request.headers.items()) + headers["X-Amz-Content-SHA256"] = sig_v4_auth.payload(aws_request) + + return headers + + +class RequestsAWSV4SignerAuth(requests.auth.AuthBase): + """ + AWS V4 Request Signer for Requests. + """ + + def __init__(self, credentials, region, service="es"): # type: ignore + self.signer = AWSV4Signer(credentials, region, service) + + def __call__(self, request): # type: ignore + return self._sign_request(request) # type: ignore + + def _sign_request(self, prepared_request): # type: ignore + """ + This method helps in signing the request by injecting the required headers. + :param prepared_request: unsigned request + :return: signed request + """ + + prepared_request.headers.update( + self.signer.sign( + prepared_request.method, + self._fetch_url(prepared_request), # type: ignore + prepared_request.body, + ) ) return prepared_request + + def _fetch_url(self, prepared_request): # type: ignore + """ + This is a util method that helps in reconstructing the request url. + :param prepared_request: unsigned request + :return: reconstructed url + """ + url = urlparse(prepared_request.url) + path = url.path or "/" + + # fetch the query string if present in the request + querystring = "" + if url.query: + querystring = "?" 
+ urlencode(
+                parse_qs(url.query, keep_blank_values=True), doseq=True
+            )
+
+        # fetch the host information from headers
+        headers = dict(
+            (key.lower(), value) for key, value in prepared_request.headers.items()
+        )
+        location = headers.get("host") or url.netloc
+
+        # construct the url and return
+        return url.scheme + "://" + location + path + querystring
+
+
+# Deprecated: use RequestsAWSV4SignerAuth
+class AWSV4SignerAuth(RequestsAWSV4SignerAuth):
+    pass
+
+
+class Urllib3AWSV4SignerAuth(Callable):  # type: ignore
+    def __init__(self, credentials, region, service="es"):  # type: ignore
+        self.signer = AWSV4Signer(credentials, region, service)
+
+    def __call__(self, method: str, url: str, body: Any) -> Dict[str, str]:
+        return self.signer.sign(method, url, body)
diff --git a/samples/aws/README.md b/samples/aws/README.md
new file mode 100644
index 00000000..17ad4ee0
--- /dev/null
+++ b/samples/aws/README.md
@@ -0,0 +1,22 @@
+## AWS SigV4 Samples
+
+Create an OpenSearch domain in AWS that supports IAM-based AuthN/AuthZ.
+
+```
+export AWS_ACCESS_KEY_ID=
+export AWS_SECRET_ACCESS_KEY=
+export AWS_SESSION_TOKEN=
+export AWS_REGION=us-west-2
+
+export SERVICE=es # use "aoss" for OpenSearch Serverless.
+export ENDPOINT=https://....us-west-2.es.amazonaws.com
+
+poetry run aws/search-urllib3.py
+```
+
+This will output the version of OpenSearch and a search result.
+
+```
+opensearch: 2.3.0
+{'director': 'Bennett Miller', 'title': 'Moneyball', 'year': 2011}
+```
diff --git a/samples/aws/search-requests.py b/samples/aws/search-requests.py
new file mode 100644
index 00000000..1f14f55e
--- /dev/null
+++ b/samples/aws/search-requests.py
@@ -0,0 +1,69 @@
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
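+
+# This sample signs every request with AWS SigV4 using RequestsAWSV4SignerAuth
+# on the requests-based transport (RequestsHttpConnection). It reads the
+# domain endpoint from the ENDPOINT environment variable, creates an index,
+# indexes a document, searches for it, then deletes the document and the index.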
+ +import logging + +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session +from opensearchpy import RequestsAWSV4SignerAuth, OpenSearch, RequestsHttpConnection + +# verbose logging +logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO) + +# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com +url = urlparse(environ['ENDPOINT']) +region = environ.get('AWS_REGION', 'us-east-1') +service = environ.get('SERVICE', 'es') + +credentials = Session().get_credentials() + +auth = RequestsAWSV4SignerAuth(credentials, region, service) + +client = OpenSearch( + hosts=[{ + 'host': url.netloc, + 'port': url.port or 443 + }], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + timeout=30 +) + +# TODO: remove when OpenSearch Serverless adds support for / +if service == 'es': + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + +# create an index +index = 'movies' +client.indices.create(index=index) + +try: + # index data + document = {'director': 'Bennett Miller', 'title': 'Moneyball', 'year': 2011} + client.index(index=index, body=document, id='1') + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={'query': {'match': {'director': 'miller'}}}) + for hit in results['hits']['hits']: + print(hit['_source']) + + # delete the document + client.delete(index=index, id='1') +finally: + # delete the index + client.indices.delete(index=index) \ No newline at end of file diff --git a/samples/aws/search-urllib3.py b/samples/aws/search-urllib3.py new file mode 100644 index 00000000..46d6a89f --- /dev/null +++ b/samples/aws/search-urllib3.py @@ -0,0 +1,69 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
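+
+# Same flow as search-requests.py, but over the urllib3-based transport:
+# requests are signed with Urllib3AWSV4SignerAuth and sent through
+# Urllib3HttpConnection.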
+ +import logging + +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session +from opensearchpy import Urllib3AWSV4SignerAuth, OpenSearch, Urllib3HttpConnection + +# verbose logging +logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO) + +# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com +url = urlparse(environ['ENDPOINT']) +region = environ.get('AWS_REGION', 'us-east-1') +service = environ.get('SERVICE', 'es') + +credentials = Session().get_credentials() + +auth = Urllib3AWSV4SignerAuth(credentials, region, service) + +client = OpenSearch( + hosts=[{ + 'host': url.netloc, + 'port': url.port or 443 + }], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=Urllib3HttpConnection, + timeout=30 +) + +# TODO: remove when OpenSearch Serverless adds support for / +if service == 'es': + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + +# create an index +index = 'movies' +client.indices.create(index=index) + +try: + # index data + document = {'director': 'Bennett Miller', 'title': 'Moneyball', 'year': 2011} + client.index(index=index, body=document, id='1') + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={'query': {'match': {'director': 'miller'}}}) + for hit in results['hits']['hits']: + print(hit['_source']) + + # delete the document + client.delete(index=index, id='1') +finally: + # delete the index + client.indices.delete(index=index) \ No newline at end of file diff --git a/samples/poetry.lock b/samples/poetry.lock index e8e8b7cc..55fb558d 100644 --- a/samples/poetry.lock +++ b/samples/poetry.lock @@ -1,174 +1,45 @@ # This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] -name = "aiohttp" -version = "3.8.5" -description = "Async http client/server framework (asyncio)" +name = "boto3" +version = "1.28.67" +description = "The AWS SDK for Python" optional = false -python-versions = ">=3.6" +python-versions = ">= 3.7" files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = 
"aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = 
"aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, + {file = "boto3-1.28.67-py3-none-any.whl", hash = "sha256:7d17f987a8b4f804e5ae509a30589736a72c6db7b0e2fb1338997128fdc9a3ec"}, + {file = "boto3-1.28.67.tar.gz", hash = "sha256:8db91c0648c9dcde1cf7fb4c15cd50da1fdef573595a9b9c769a303c7531b9a6"}, ] [package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" -asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} -attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" 
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} -yarl = ">=1.0,<2.0" +botocore = ">=1.31.67,<1.32.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.7.0,<0.8.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" +name = "botocore" +version = "1.31.67" +description = "Low-level, data-driven core of boto 3." optional = false -python-versions = ">=3.7" +python-versions = ">= 3.7" files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "botocore-1.31.67-py3-none-any.whl", hash = "sha256:487fb6ee4a6612613da370599b1a1aca0e159dd9e94b2e8aaa8e6ad9cc546ded"}, + {file = "botocore-1.31.67.tar.gz", hash = "sha256:ab3b73a2e03efa1c534a94f8db4a5cf45629a53e5478d2d154b0a3e2ffb05249"}, ] [package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.6" -files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] - -[package.dependencies] -typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} - -[[package]] -name = "asynctest" -version = "0.13.0" -description = "Enhance the standard unittest package with features for testing asyncio libraries" -optional = false -python-versions = ">=3.5" -files = [ - {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, - {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, -] - -[[package]] -name = "attrs" -version = "23.1.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, ] -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +crt = ["awscrt (==0.16.26)"] [[package]] name = "certifi" @@ -183,169 +54,101 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.0" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = 
"charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] - -[[package]] -name = "frozenlist" -version = "1.3.3" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.7" -files = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = 
"frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = 
"frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = 
"charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, ] [[package]] @@ -360,134 +163,39 @@ files = [ ] [[package]] -name = "importlib-metadata" -version = "6.7.0" -description = "Read metadata from Python packages" +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, - {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, -] - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "multidict" -version = "6.0.4" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = 
"multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", 
hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] [[package]] name = "opensearch-py" -version = "2.2.0" +version = "2.3.2" description = "Python client for OpenSearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -files = [] -develop = false +files = [ + {file = "opensearch-py-2.3.2.tar.gz", hash = "sha256:96e470b55107fd5bfd873722dc9808c333360eacfa174341f5cc2d021aa30448"}, + {file = "opensearch_py-2.3.2-py2.py3-none-any.whl", hash = "sha256:b1d6607380c8f19d90c142470939d051f0bac96069ce0ac25970b3c39c431f8b"}, +] [package.dependencies] -aiohttp = {version = ">=3,<4", optional = true, markers = "extra == \"async\""} certifi = ">=2022.12.07" python-dateutil = 
"*" requests = ">=2.4.0,<3.0.0" six = "*" -urllib3 = ">=1.21.1,<2" +urllib3 = ">=1.26.9" [package.extras] async = ["aiohttp (>=3,<4)"] -develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] -docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] -kerberos = ["requests_kerberos"] - -[package.source] -type = "directory" -url = ".." +develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst-parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] +docs = ["myst-parser", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] +kerberos = ["requests-kerberos"] [[package]] name = "python-dateutil" @@ -524,6 +232,23 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "s3transfer" +version = "0.7.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"}, + {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"}, +] + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + [[package]] name = "six" version = "1.16.0" @@ -535,137 +260,40 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - [[package]] name = "urllib3" -version = "1.26.16" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = 
"yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = 
"yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = 
"yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} - -[[package]] -name = "zipp" -version = "3.15.0" -description = "Backport of pathlib-compatible object wrapper for zip files" +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
-    {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
+    {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
+    {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
 ]

 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]

 [metadata]
 lock-version = "2.0"
 python-versions = "^3.7"
-content-hash = "daae1667da61b85f9bbc2e5c484721c424594da7cb08fe9cf85c08d6731bcc52"
+content-hash = "1309989011bed3cb46e36fc451b65f040ef9fe9cecbe3f3706be240d4ea6d52e"
diff --git a/samples/pyproject.toml b/samples/pyproject.toml
index 380e2c9a..8a89367f 100644
--- a/samples/pyproject.toml
+++ b/samples/pyproject.toml
@@ -8,7 +8,8 @@ readme = "README.md"

 [tool.poetry.dependencies]
 python = "^3.7"
-opensearch-py = { path = "../", extras=["async"] }
+opensearch-py = { path = "../", extras=["async"], develop = true }
+boto3 = "^1.28"

 [build-system]
 requires = ["poetry-core"]
diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py
index 147a6a3a..3df51645 100644
--- a/test_opensearchpy/test_async/test_connection.py
+++ b/test_opensearchpy/test_async/test_connection.py
@@ -48,11 +48,6 @@

 pytestmark = pytest.mark.asyncio

-
-def gzip_decompress(data):
-    buf = gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb")
-    return buf.read()
-
-
 class TestAIOHttpConnection:
     async def _get_mock_connection(
         self,
@@ -130,7 +125,8 @@ async def test_http_compression(self):

         _, kwargs = con.session.request.call_args

-        assert gzip_decompress(kwargs["data"]) == b"{}"
+        buf = gzip.GzipFile(fileobj=io.BytesIO(kwargs["data"]), mode="rb")
+        assert buf.read() == b"{}"

         assert kwargs["headers"]["accept-encoding"] == "gzip,deflate"
         assert kwargs["headers"]["content-encoding"] == "gzip"
diff --git a/test_opensearchpy/test_connection.py b/test_opensearchpy/test_connection.py
deleted file mode 100644
index 5ec6e09d..00000000
--- a/test_opensearchpy/test_connection.py
+++ /dev/null
@@ -1,1171 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-#
-# Licensed to Elasticsearch B.V. under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch B.V. licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - - -import gzip -import io -import json -import os -import re -import ssl -import sys -import unittest -import uuid -import warnings -from platform import python_version - -import pytest -import six -import urllib3 -from mock import Mock, patch -from requests.auth import AuthBase -from urllib3._collections import HTTPHeaderDict - -from opensearchpy import __versionstr__ -from opensearchpy.compat import reraise_exceptions -from opensearchpy.connection import ( - Connection, - RequestsHttpConnection, - Urllib3HttpConnection, -) -from opensearchpy.exceptions import ( - ConflictError, - ConnectionError, - NotFoundError, - RequestError, - TransportError, -) - -from .test_cases import SkipTest, TestCase - -try: - from pytest import MonkeyPatch -except ImportError: # Old version of pytest for 2.7 and 3.5 - from _pytest.monkeypatch import MonkeyPatch - -from pytest import raises - -from opensearchpy import OpenSearch, serializer -from opensearchpy.connection import connections - -if sys.version_info > (3, 0): - from test_opensearchpy.TestHttpServer import TestHTTPServer - - -def gzip_decompress(data): - buf = gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") - return buf.read() - - -class TestBaseConnection(TestCase): - def test_empty_warnings(self): - con = Connection() - with warnings.catch_warnings(record=True) as w: - con._raise_warnings(()) - con._raise_warnings([]) - - self.assertEqual(w, []) - - def test_raises_warnings(self): - con = Connection() - - with warnings.catch_warnings(record=True) as warn: - con._raise_warnings(['299 OpenSearch-7.6.1-aa751 "this is deprecated"']) - - self.assertEqual([str(w.message) for w in warn], ["this is deprecated"]) - - with warnings.catch_warnings(record=True) as warn: - con._raise_warnings( - [ - '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', - '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', - '299 OpenSearch-7.6.1-aa751 "guess what? deprecated"', - ] - ) - - self.assertEqual( - [str(w.message) for w in warn], - ["this is also deprecated", "guess what? 
deprecated"], - ) - - def test_raises_warnings_when_folded(self): - con = Connection() - with warnings.catch_warnings(record=True) as warn: - con._raise_warnings( - [ - '299 OpenSearch-7.6.1-aa751 "warning",' - '299 OpenSearch-7.6.1-aa751 "folded"', - ] - ) - - self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) - - @unittest.skipIf(six.PY2, "not compatible with python2") - def test_raises_errors(self): - con = Connection() - with self.assertLogs("opensearch") as captured, self.assertRaises( - NotFoundError - ): - con._raise_error(404, "Not found", "application/json") - self.assertEqual(len(captured.output), 1) - - # NB: this should assertNoLogs() but that method is not available until python3.10 - with self.assertRaises(NotFoundError): - con._raise_error(404, "Not found", "text/plain; charset=UTF-8") - - def test_ipv6_host_and_port(self): - for kwargs, expected_host in [ - ({"host": "::1"}, "http://[::1]:9200"), - ({"host": "::1", "port": 443}, "http://[::1]:443"), - ({"host": "::1", "use_ssl": True}, "https://[::1]:9200"), - ({"host": "127.0.0.1", "port": 1234}, "http://127.0.0.1:1234"), - ({"host": "localhost", "use_ssl": True}, "https://localhost:9200"), - ]: - conn = Connection(**kwargs) - assert conn.host == expected_host - - def test_compatibility_accept_header(self): - try: - conn = Connection() - assert "accept" not in conn.headers - - os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "0" - - conn = Connection() - assert "accept" not in conn.headers - - os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "1" - - conn = Connection() - assert ( - conn.headers["accept"] - == "application/vnd.elasticsearch+json;compatible-with=7" - ) - finally: - os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") - - def test_ca_certs_ssl_cert_file(self): - cert = "/path/to/clientcert.pem" - with MonkeyPatch().context() as monkeypatch: - monkeypatch.setenv("SSL_CERT_FILE", cert) - assert Connection.default_ca_certs() == cert - - def test_ca_certs_ssl_cert_dir(self): - cert = "/path/to/clientcert/dir" - with MonkeyPatch().context() as monkeypatch: - monkeypatch.setenv("SSL_CERT_DIR", cert) - assert Connection.default_ca_certs() == cert - - def test_ca_certs_certifi(self): - import certifi - - assert Connection.default_ca_certs() == certifi.where() - - def test_no_ca_certs(self): - with MonkeyPatch().context() as monkeypatch: - monkeypatch.setitem(sys.modules, "certifi", None) - assert Connection.default_ca_certs() is None - - -class TestUrllib3Connection(TestCase): - def _get_mock_connection(self, connection_params={}, response_body=b"{}"): - con = Urllib3HttpConnection(**connection_params) - - def _dummy_urlopen(*args, **kwargs): - dummy_response = Mock() - dummy_response.headers = HTTPHeaderDict({}) - dummy_response.status = 200 - dummy_response.data = response_body - _dummy_urlopen.call_args = (args, kwargs) - return dummy_response - - con.pool.urlopen = _dummy_urlopen - return con - - def test_ssl_context(self): - try: - context = ssl.create_default_context() - except AttributeError: - # if create_default_context raises an AttributeError Exception - # it means SSLContext is not available for that version of python - # and we should skip this test. 
- raise SkipTest( - "Test test_ssl_context is skipped cause SSLContext is not available for this version of ptyhon" - ) - - con = Urllib3HttpConnection(use_ssl=True, ssl_context=context) - self.assertEqual(len(con.pool.conn_kw.keys()), 1) - self.assertIsInstance(con.pool.conn_kw["ssl_context"], ssl.SSLContext) - self.assertTrue(con.use_ssl) - - def test_opaque_id(self): - con = Urllib3HttpConnection(opaque_id="app-1") - self.assertEqual(con.headers["x-opaque-id"], "app-1") - - def test_no_http_compression(self): - con = self._get_mock_connection() - self.assertFalse(con.http_compress) - self.assertNotIn("accept-encoding", con.headers) - - con.perform_request("GET", "/") - - (_, _, req_body), kwargs = con.pool.urlopen.call_args - - self.assertFalse(req_body) - self.assertNotIn("accept-encoding", kwargs["headers"]) - self.assertNotIn("content-encoding", kwargs["headers"]) - - def test_http_compression(self): - con = self._get_mock_connection({"http_compress": True}) - self.assertTrue(con.http_compress) - self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") - - # 'content-encoding' shouldn't be set at a connection level. - # Should be applied only if the request is sent with a body. - self.assertNotIn("content-encoding", con.headers) - - con.perform_request("GET", "/", body=b"{}") - - (_, _, req_body), kwargs = con.pool.urlopen.call_args - - self.assertEqual(gzip_decompress(req_body), b"{}") - self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") - self.assertEqual(kwargs["headers"]["content-encoding"], "gzip") - - con.perform_request("GET", "/") - - (_, _, req_body), kwargs = con.pool.urlopen.call_args - - self.assertFalse(req_body) - self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") - self.assertNotIn("content-encoding", kwargs["headers"]) - - def test_default_user_agent(self): - con = Urllib3HttpConnection() - self.assertEqual( - con._get_default_user_agent(), - "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()), - ) - - def test_timeout_set(self): - con = Urllib3HttpConnection(timeout=42) - self.assertEqual(42, con.timeout) - - def test_keep_alive_is_on_by_default(self): - con = Urllib3HttpConnection() - self.assertEqual( - { - "connection": "keep-alive", - "content-type": "application/json", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - def test_http_auth(self): - con = Urllib3HttpConnection(http_auth="username:secret") - self.assertEqual( - { - "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", - "connection": "keep-alive", - "content-type": "application/json", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - def test_http_auth_tuple(self): - con = Urllib3HttpConnection(http_auth=("username", "secret")) - self.assertEqual( - { - "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", - "content-type": "application/json", - "connection": "keep-alive", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - def test_http_auth_list(self): - con = Urllib3HttpConnection(http_auth=["username", "secret"]) - self.assertEqual( - { - "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", - "content-type": "application/json", - "connection": "keep-alive", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_as_http_auth(self): - region = "us-west-2" - - import requests - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - auth = 
AWSV4SignerAuth(self.mock_session(), region) - con = RequestsHttpConnection(http_auth=auth) - prepared_request = requests.Request("GET", "http://localhost").prepare() - auth(prepared_request) - self.assertEqual(auth, con.session.auth) - self.assertIn("Authorization", prepared_request.headers) - self.assertIn("X-Amz-Date", prepared_request.headers) - self.assertIn("X-Amz-Security-Token", prepared_request.headers) - self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_when_region_is_null(self): - session = self.mock_session() - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth(session, None) - assert str(e.value) == "Region cannot be empty" - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth(session, "") - assert str(e.value) == "Region cannot be empty" - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_when_credentials_is_null(self): - region = "us-west-1" - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth(None, region) - assert str(e.value) == "Credentials cannot be empty" - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth("", region) - assert str(e.value) == "Credentials cannot be empty" - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_when_service_is_specified(self): - region = "us-west-1" - service = "aoss" - - import requests - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - auth = AWSV4SignerAuth(self.mock_session(), region, service) - con = RequestsHttpConnection(http_auth=auth) - prepared_request = requests.Request("GET", "http://localhost").prepare() - auth(prepared_request) - self.assertEqual(auth, con.session.auth) - self.assertIn("Authorization", prepared_request.headers) - self.assertIn("X-Amz-Date", prepared_request.headers) - self.assertIn("X-Amz-Security-Token", prepared_request.headers) - - def mock_session(self): - access_key = uuid.uuid4().hex - secret_key = uuid.uuid4().hex - token = uuid.uuid4().hex - dummy_session = Mock() - dummy_session.access_key = access_key - dummy_session.secret_key = secret_key - dummy_session.token = token - del dummy_session.get_frozen_credentials - - return dummy_session - - def test_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) - self.assertEqual(1, len(w)) - self.assertEqual( - "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", - str(w[0].message), - ) - - self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = Urllib3HttpConnection( - use_ssl=True, verify_certs=False, ssl_show_warn=False - ) - self.assertEqual(0, len(w)) - - self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - - def test_doesnt_use_https_if_not_specified(self): - con = Urllib3HttpConnection() - self.assertIsInstance(con.pool, urllib3.HTTPConnectionPool) - - def test_no_warning_when_using_ssl_context(self): - ctx = ssl.create_default_context() - with warnings.catch_warnings(record=True) as w: - Urllib3HttpConnection(ssl_context=ctx) - self.assertEqual(0, 
len(w)) - - def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): - for kwargs in ( - {"ssl_show_warn": False}, - {"ssl_show_warn": True}, - {"verify_certs": True}, - {"verify_certs": False}, - {"ca_certs": "/path/to/certs"}, - {"ssl_show_warn": True, "ca_certs": "/path/to/certs"}, - ): - kwargs["ssl_context"] = ssl.create_default_context() - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - - Urllib3HttpConnection(**kwargs) - - self.assertEqual(1, len(w)) - self.assertEqual( - "When using `ssl_context`, all other SSL related kwargs are ignored", - str(w[0].message), - ) - - def test_uses_given_ca_certs(self): - path = "/path/to/my/ca_certs.pem" - c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) - self.assertEqual(path, c.pool.ca_certs) - - def test_uses_default_ca_certs(self): - c = Urllib3HttpConnection(use_ssl=True) - self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) - - def test_uses_no_ca_certs(self): - c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) - self.assertIsNone(c.pool.ca_certs) - - @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): - con = self._get_mock_connection(connection_params={"http_compress": True}) - con.perform_request("GET", "/", body=b'{"example": "body"}') - - self.assertEqual(2, logger.debug.call_count) - req, resp = logger.debug.call_args_list - - self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) - self.assertEqual("< {}", resp[0][0] % resp[0][1:]) - - def test_surrogatepass_into_bytes(self): - buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" - con = self._get_mock_connection(response_body=buf) - status, headers, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) # fmt: skip - - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) - def test_recursion_error_reraised(self): - conn = Urllib3HttpConnection() - - def urlopen_raise(*_, **__): - raise RecursionError("Wasn't modified!") - - conn.pool.urlopen = urlopen_raise - - with pytest.raises(RecursionError) as e: - conn.perform_request("GET", "/") - assert str(e.value) == "Wasn't modified!" 
- - -class TestSignerWithFrozenCredentials(TestUrllib3Connection): - def mock_session(self): - access_key = uuid.uuid4().hex - secret_key = uuid.uuid4().hex - token = uuid.uuid4().hex - dummy_session = Mock() - dummy_session.access_key = access_key - dummy_session.secret_key = secret_key - dummy_session.token = token - dummy_session.get_frozen_credentials = Mock(return_value=dummy_session) - - return dummy_session - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_frozen_credentials_as_http_auth(self): - region = "us-west-2" - - import requests - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - mock_session = self.mock_session() - - auth = AWSV4SignerAuth(mock_session, region) - con = RequestsHttpConnection(http_auth=auth) - prepared_request = requests.Request("GET", "http://localhost").prepare() - auth(prepared_request) - self.assertEqual(auth, con.session.auth) - self.assertIn("Authorization", prepared_request.headers) - self.assertIn("X-Amz-Date", prepared_request.headers) - self.assertIn("X-Amz-Security-Token", prepared_request.headers) - self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) - mock_session.get_frozen_credentials.assert_called_once() - - -class TestRequestsConnection(TestCase): - def _get_mock_connection( - self, connection_params={}, status_code=200, response_body=b"{}" - ): - con = RequestsHttpConnection(**connection_params) - - def _dummy_send(*args, **kwargs): - dummy_response = Mock() - dummy_response.headers = {} - dummy_response.status_code = status_code - dummy_response.content = response_body - dummy_response.request = args[0] - dummy_response.cookies = {} - _dummy_send.call_args = (args, kwargs) - return dummy_response - - con.session.send = _dummy_send - return con - - def _get_request(self, connection, *args, **kwargs): - if "body" in kwargs: - kwargs["body"] = kwargs["body"].encode("utf-8") - - status, headers, data = connection.perform_request(*args, **kwargs) - self.assertEqual(200, status) - self.assertEqual("{}", data) - - timeout = kwargs.pop("timeout", connection.timeout) - args, kwargs = connection.session.send.call_args - self.assertEqual(timeout, kwargs["timeout"]) - self.assertEqual(1, len(args)) - return args[0] - - def test_custom_http_auth_is_allowed(self): - auth = AuthBase() - c = RequestsHttpConnection(http_auth=auth) - - self.assertEqual(auth, c.session.auth) - - def test_timeout_set(self): - con = RequestsHttpConnection(timeout=42) - self.assertEqual(42, con.timeout) - - def test_opaque_id(self): - con = RequestsHttpConnection(opaque_id="app-1") - self.assertEqual(con.headers["x-opaque-id"], "app-1") - - def test_no_http_compression(self): - con = self._get_mock_connection() - - self.assertFalse(con.http_compress) - self.assertNotIn("content-encoding", con.session.headers) - - con.perform_request("GET", "/") - - req = con.session.send.call_args[0][0] - self.assertNotIn("content-encoding", req.headers) - self.assertNotIn("accept-encoding", req.headers) - - def test_http_compression(self): - con = self._get_mock_connection( - {"http_compress": True}, - ) - - self.assertTrue(con.http_compress) - - # 'content-encoding' shouldn't be set at a session level. - # Should be applied only if the request is sent with a body. 
- self.assertNotIn("content-encoding", con.session.headers) - - con.perform_request("GET", "/", body=b"{}") - - req = con.session.send.call_args[0][0] - self.assertEqual(req.headers["content-encoding"], "gzip") - self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") - - con.perform_request("GET", "/") - - req = con.session.send.call_args[0][0] - self.assertNotIn("content-encoding", req.headers) - self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") - - def test_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = self._get_mock_connection( - {"use_ssl": True, "url_prefix": "url", "verify_certs": False} - ) - self.assertEqual(1, len(w)) - self.assertEqual( - "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", - str(w[0].message), - ) - - request = self._get_request(con, "GET", "/") - - self.assertEqual("https://localhost:9200/url/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_uses_given_ca_certs(self): - path = "/path/to/my/ca_certs.pem" - c = RequestsHttpConnection(ca_certs=path) - self.assertEqual(path, c.session.verify) - - def test_uses_default_ca_certs(self): - c = RequestsHttpConnection() - self.assertEqual(Connection.default_ca_certs(), c.session.verify) - - def test_uses_no_ca_certs(self): - c = RequestsHttpConnection(verify_certs=False) - self.assertFalse(c.session.verify) - - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = self._get_mock_connection( - { - "use_ssl": True, - "url_prefix": "url", - "verify_certs": False, - "ssl_show_warn": False, - } - ) - self.assertEqual(0, len(w)) - - request = self._get_request(con, "GET", "/") - - self.assertEqual("https://localhost:9200/url/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_merge_headers(self): - con = self._get_mock_connection( - connection_params={"headers": {"h1": "v1", "h2": "v2"}} - ) - req = self._get_request(con, "GET", "/", headers={"h2": "v2p", "h3": "v3"}) - self.assertEqual(req.headers["h1"], "v1") - self.assertEqual(req.headers["h2"], "v2p") - self.assertEqual(req.headers["h3"], "v3") - - def test_default_headers(self): - con = self._get_mock_connection() - req = self._get_request(con, "GET", "/") - self.assertEqual(req.headers["content-type"], "application/json") - self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) - - def test_custom_headers(self): - con = self._get_mock_connection() - req = self._get_request( - con, - "GET", - "/", - headers={ - "content-type": "application/x-ndjson", - "user-agent": "custom-agent/1.2.3", - }, - ) - self.assertEqual(req.headers["content-type"], "application/x-ndjson") - self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") - - def test_http_auth(self): - con = RequestsHttpConnection(http_auth="username:secret") - self.assertEqual(("username", "secret"), con.session.auth) - - def test_http_auth_tuple(self): - con = RequestsHttpConnection(http_auth=("username", "secret")) - self.assertEqual(("username", "secret"), con.session.auth) - - def test_http_auth_list(self): - con = RequestsHttpConnection(http_auth=["username", "secret"]) - self.assertEqual(("username", "secret"), con.session.auth) - - def test_repr(self): - con = self._get_mock_connection({"host": "opensearchpy.com", "port": 443}) - self.assertEqual( - "", repr(con) - ) - - def 
test_conflict_error_is_returned_on_409(self): - con = self._get_mock_connection(status_code=409) - self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") - - def test_not_found_error_is_returned_on_404(self): - con = self._get_mock_connection(status_code=404) - self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") - - def test_request_error_is_returned_on_400(self): - con = self._get_mock_connection(status_code=400) - self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") - - @patch("opensearchpy.connection.base.logger") - def test_head_with_404_doesnt_get_logged(self, logger): - con = self._get_mock_connection(status_code=404) - self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") - self.assertEqual(0, logger.warning.call_count) - - @patch("opensearchpy.connection.base.tracer") - @patch("opensearchpy.connection.base.logger") - def test_failed_request_logs_and_traces(self, logger, tracer): - con = self._get_mock_connection( - response_body=b'{"answer": 42}', status_code=500 - ) - self.assertRaises( - TransportError, - con.perform_request, - "GET", - "/", - {"param": 42}, - "{}".encode("utf-8"), - ) - - # trace request - self.assertEqual(1, tracer.info.call_count) - # trace response - self.assertEqual(1, tracer.debug.call_count) - # log url and duration - self.assertEqual(1, logger.warning.call_count) - self.assertTrue( - re.match( - r"^GET http://localhost:9200/\?param=42 \[status:500 request:0.[0-9]{3}s\]", - logger.warning.call_args[0][0] % logger.warning.call_args[0][1:], - ) - ) - - @patch("opensearchpy.connection.base.tracer") - @patch("opensearchpy.connection.base.logger") - def test_success_logs_and_traces(self, logger, tracer): - con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") - status, headers, data = con.perform_request( - "GET", - "/", - {"param": 42}, - """{"question": "what's that?"}""".encode("utf-8"), - ) - - # trace request - self.assertEqual(1, tracer.info.call_count) - self.assertEqual( - """curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' -d '{\n "question": "what\\u0027s that?"\n}'""", - tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], - ) - # trace response - self.assertEqual(1, tracer.debug.call_count) - self.assertTrue( - re.match( - r'#\[200\] \(0.[0-9]{3}s\)\n#{\n# "answer": "that\\u0027s it!"\n#}', - tracer.debug.call_args[0][0] % tracer.debug.call_args[0][1:], - ) - ) - - # log url and duration - self.assertEqual(1, logger.info.call_count) - self.assertTrue( - re.match( - r"GET http://localhost:9200/\?param=42 \[status:200 request:0.[0-9]{3}s\]", - logger.info.call_args[0][0] % logger.info.call_args[0][1:], - ) - ) - # log request body and response - self.assertEqual(2, logger.debug.call_count) - req, resp = logger.debug.call_args_list - self.assertEqual('> {"question": "what\'s that?"}', req[0][0] % req[0][1:]) - self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) - - @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): - con = self._get_mock_connection(connection_params={"http_compress": True}) - con.perform_request("GET", "/", body=b'{"example": "body"}') - - self.assertEqual(2, logger.debug.call_count) - req, resp = logger.debug.call_args_list - self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) - self.assertEqual("< {}", resp[0][0] % resp[0][1:]) - - con = self._get_mock_connection( - connection_params={"http_compress":
True}, - status_code=500, - response_body=b'{"hello":"world"}', - ) - with pytest.raises(TransportError): - con.perform_request("GET", "/", body=b'{"example": "body2"}') - - self.assertEqual(4, logger.debug.call_count) - _, _, req, resp = logger.debug.call_args_list - self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) - self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) - - def test_defaults(self): - con = self._get_mock_connection() - request = self._get_request(con, "GET", "/") - - self.assertEqual("http://localhost:9200/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_params_properly_encoded(self): - con = self._get_mock_connection() - request = self._get_request( - con, "GET", "/", params={"param": "value with spaces"} - ) - - self.assertEqual("http://localhost:9200/?param=value+with+spaces", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_body_attached(self): - con = self._get_mock_connection() - request = self._get_request(con, "GET", "/", body='{"answer": 42}') - - self.assertEqual("http://localhost:9200/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) - - def test_http_auth_attached(self): - con = self._get_mock_connection({"http_auth": "username:secret"}) - request = self._get_request(con, "GET", "/") - - self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") - - @patch("opensearchpy.connection.base.tracer") - def test_url_prefix(self, tracer): - con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) - request = self._get_request( - con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 - ) - - self.assertEqual("http://localhost:9200/some-prefix/_search", request.url) - self.assertEqual("GET", request.method) - self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) - - # trace request - self.assertEqual(1, tracer.info.call_count) - self.assertEqual( - "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/_search?pretty' -d '{\n \"answer\": 42\n}'", - tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], - ) - - def test_surrogatepass_into_bytes(self): - buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" - con = self._get_mock_connection(response_body=buf) - status, headers, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) # fmt: skip - - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) - def test_recursion_error_reraised(self): - conn = RequestsHttpConnection() - - def send_raise(*_, **__): - raise RecursionError("Wasn't modified!") - - conn.session.send = send_raise - - with pytest.raises(RecursionError) as e: - conn.perform_request("GET", "/") - assert str(e.value) == "Wasn't modified!" 
- - -@pytest.mark.skipif( - sys.version_info < (3, 0), - reason="http_server is only available from python 3.x", -) -class TestConnectionHttpServer: - """Tests the HTTP connection implementations against a live server E2E""" - - @classmethod - def setup_class(cls): - # Start server - cls.server = TestHTTPServer(port=8080) - cls.server.start() - - @classmethod - def teardown_class(cls): - # Stop server - cls.server.stop() - - def httpserver(self, conn, **kwargs): - status, headers, data = conn.perform_request("GET", "/", **kwargs) - data = json.loads(data) - return (status, data) - - def test_urllib3_connection(self): - # Defaults - conn = Urllib3HttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - user_agent = conn._get_default_user_agent() - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=False - conn = Urllib3HttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=False, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=True - conn = Urllib3HttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=True, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # Headers - conn = Urllib3HttpConnection( - "localhost", - port=8080, - use_ssl=False, - http_compress=True, - headers={"header1": "value1"}, - timeout=60, - ) - status, data = self.httpserver( - conn, headers={"header2": "value2", "header1": "override!"} - ) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": "application/json", - "Host": "localhost:8080", - "Header1": "override!", - "Header2": "value2", - "User-Agent": user_agent, - } - - def test_urllib3_connection_error(self): - conn = Urllib3HttpConnection("not.a.host.name") - with pytest.raises(ConnectionError): - conn.perform_request("GET", "/") - - def test_requests_connection(self): - # Defaults - conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - user_agent = conn._get_default_user_agent() - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=False - conn = RequestsHttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=False, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=True - conn = RequestsHttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=True, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": 
"application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # Headers - conn = RequestsHttpConnection( - "localhost", - port=8080, - use_ssl=False, - http_compress=True, - headers={"header1": "value1"}, - timeout=60, - ) - status, data = self.httpserver( - conn, headers={"header2": "value2", "header1": "override!"} - ) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": "application/json", - "Host": "localhost:8080", - "Header1": "override!", - "Header2": "value2", - "User-Agent": user_agent, - } - - def test_requests_connection_error(self): - conn = RequestsHttpConnection("not.a.host.name") - with pytest.raises(ConnectionError): - conn.perform_request("GET", "/") - - -@pytest.mark.skipif( - sys.version_info < (3, 0), - reason="http_server is only available from python 3.x", -) -class TestRequestsConnectionRedirect: - @classmethod - def setup_class(cls): - # Start servers - cls.server1 = TestHTTPServer(port=8080) - cls.server1.start() - cls.server2 = TestHTTPServer(port=8090) - cls.server2.start() - - @classmethod - def teardown_class(cls): - # Stop servers - cls.server2.stop() - cls.server1.stop() - - # allow_redirects = False - def test_redirect_failure_when_allow_redirect_false(self): - conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - with pytest.raises(TransportError) as e: - conn.perform_request("GET", "/redirect", allow_redirects=False) - assert e.value.status_code == 302 - - # allow_redirects = True (Default) - def test_redirect_success_when_allow_redirect_true(self): - conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - user_agent = conn._get_default_user_agent() - status, headers, data = conn.perform_request("GET", "/redirect") - assert status == 200 - data = json.loads(data) - assert data["headers"] == { - "Host": "localhost:8090", - "Accept-Encoding": "identity", - "User-Agent": user_agent, - } - - -def test_default_connection_is_returned_by_default(): - c = connections.Connections() - - con, con2 = object(), object() - c.add_connection("default", con) - - c.add_connection("not-default", con2) - - assert c.get_connection() is con - - -def test_get_connection_created_connection_if_needed(): - c = connections.Connections() - c.configure(default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]}) - - default = c.get_connection() - local = c.get_connection("local") - - assert isinstance(default, OpenSearch) - assert isinstance(local, OpenSearch) - - assert [{"host": "opensearch.com"}] == default.transport.hosts - assert [{"host": "localhost"}] == local.transport.hosts - - -def test_configure_preserves_unchanged_connections(): - c = connections.Connections() - - c.configure(default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]}) - default = c.get_connection() - local = c.get_connection("local") - - c.configure( - default={"hosts": ["not-opensearch.com"]}, local={"hosts": ["localhost"]} - ) - new_default = c.get_connection() - new_local = c.get_connection("local") - - assert new_local is local - assert new_default is not default - - -def test_remove_connection_removes_both_conn_and_conf(): - c = connections.Connections() - - c.configure(default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]}) - c.add_connection("local2", object()) - - c.remove_connection("default") - c.get_connection("local2") - c.remove_connection("local2") - - with raises(Exception): - c.get_connection("local2") - 
c.get_connection("default") - - -def test_create_connection_constructs_client(): - c = connections.Connections() - c.create_connection("testing", hosts=["opensearch.com"]) - - con = c.get_connection("testing") - assert [{"host": "opensearch.com"}] == con.transport.hosts - - -def test_create_connection_adds_our_serializer(): - c = connections.Connections() - c.create_connection("testing", hosts=["opensearch.com"]) - - assert c.get_connection("testing").transport.serializer is serializer.serializer diff --git a/test_opensearchpy/test_connection/__init__.py b/test_opensearchpy/test_connection/__init__.py new file mode 100644 index 00000000..7e52ae22 --- /dev/null +++ b/test_opensearchpy/test_connection/__init__.py @@ -0,0 +1,25 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py new file mode 100644 index 00000000..63729206 --- /dev/null +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +import os +import sys +import unittest +import warnings + +import six + +from opensearchpy.connection import Connection +from opensearchpy.exceptions import NotFoundError + +from ..test_cases import TestCase + +try: + from pytest import MonkeyPatch +except ImportError: # Old version of pytest for 2.7 and 3.5 + from _pytest.monkeypatch import MonkeyPatch + +from pytest import raises + +from opensearchpy import OpenSearch, serializer +from opensearchpy.connection import connections + + +class TestBaseConnection(TestCase): + def test_empty_warnings(self): + con = Connection() + with warnings.catch_warnings(record=True) as w: + con._raise_warnings(()) + con._raise_warnings([]) + + self.assertEqual(w, []) + + def test_raises_warnings(self): + con = Connection() + + with warnings.catch_warnings(record=True) as warn: + con._raise_warnings(['299 OpenSearch-7.6.1-aa751 "this is deprecated"']) + + self.assertEqual([str(w.message) for w in warn], ["this is deprecated"]) + + with warnings.catch_warnings(record=True) as warn: + con._raise_warnings( + [ + '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', + '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', + '299 OpenSearch-7.6.1-aa751 "guess what? deprecated"', + ] + ) + + self.assertEqual( + [str(w.message) for w in warn], + ["this is also deprecated", "guess what? deprecated"], + ) + + def test_raises_warnings_when_folded(self): + con = Connection() + with warnings.catch_warnings(record=True) as warn: + con._raise_warnings( + [ + '299 OpenSearch-7.6.1-aa751 "warning",' + '299 OpenSearch-7.6.1-aa751 "folded"', + ] + ) + + self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) + + @unittest.skipIf(six.PY2, "not compatible with python2") + def test_raises_errors(self): + con = Connection() + with self.assertLogs("opensearch") as captured, self.assertRaises( + NotFoundError + ): + con._raise_error(404, "Not found", "application/json") + self.assertEqual(len(captured.output), 1) + + # NB: this should assertNoLogs() but that method is not available until python3.10 + with self.assertRaises(NotFoundError): + con._raise_error(404, "Not found", "text/plain; charset=UTF-8") + + def test_ipv6_host_and_port(self): + for kwargs, expected_host in [ + ({"host": "::1"}, "http://[::1]:9200"), + ({"host": "::1", "port": 443}, "http://[::1]:443"), + ({"host": "::1", "use_ssl": True}, "https://[::1]:9200"), + ({"host": "127.0.0.1", "port": 1234}, "http://127.0.0.1:1234"), + ({"host": "localhost", "use_ssl": True}, "https://localhost:9200"), + ]: + conn = Connection(**kwargs) + assert conn.host == expected_host + + def test_compatibility_accept_header(self): + try: + conn = Connection() + assert "accept" not in conn.headers + + os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "0" + + conn = Connection() + assert "accept" not in conn.headers + + os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "1" + + conn = Connection() + assert ( + conn.headers["accept"] + == "application/vnd.elasticsearch+json;compatible-with=7" + ) + finally: + os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") + + def test_ca_certs_ssl_cert_file(self): + cert = "/path/to/clientcert.pem" + with MonkeyPatch().context() as monkeypatch: + monkeypatch.setenv("SSL_CERT_FILE", cert) + assert Connection.default_ca_certs() == cert + + def test_ca_certs_ssl_cert_dir(self): + cert = "/path/to/clientcert/dir" + with MonkeyPatch().context() as monkeypatch: + monkeypatch.setenv("SSL_CERT_DIR", cert) + assert Connection.default_ca_certs() == cert + + def test_ca_certs_certifi(self): + import 
certifi + + assert Connection.default_ca_certs() == certifi.where() + + def test_no_ca_certs(self): + with MonkeyPatch().context() as monkeypatch: + monkeypatch.setitem(sys.modules, "certifi", None) + assert Connection.default_ca_certs() is None + + def test_default_connection_is_returned_by_default(self): + c = connections.Connections() + + con, con2 = object(), object() + c.add_connection("default", con) + + c.add_connection("not-default", con2) + + assert c.get_connection() is con + + def test_get_connection_created_connection_if_needed(self): + c = connections.Connections() + c.configure( + default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} + ) + + default = c.get_connection() + local = c.get_connection("local") + + assert isinstance(default, OpenSearch) + assert isinstance(local, OpenSearch) + + assert [{"host": "opensearch.com"}] == default.transport.hosts + assert [{"host": "localhost"}] == local.transport.hosts + + def test_configure_preserves_unchanged_connections(self): + c = connections.Connections() + + c.configure( + default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} + ) + default = c.get_connection() + local = c.get_connection("local") + + c.configure( + default={"hosts": ["not-opensearch.com"]}, local={"hosts": ["localhost"]} + ) + new_default = c.get_connection() + new_local = c.get_connection("local") + + assert new_local is local + assert new_default is not default + + def test_remove_connection_removes_both_conn_and_conf(self): + c = connections.Connections() + + c.configure( + default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} + ) + c.add_connection("local2", object()) + + c.remove_connection("default") + c.get_connection("local2") + c.remove_connection("local2") + + with raises(Exception): + c.get_connection("local2") + c.get_connection("default") + + def test_create_connection_constructs_client(self): + c = connections.Connections() + c.create_connection("testing", hosts=["opensearch.com"]) + + con = c.get_connection("testing") + assert [{"host": "opensearch.com"}] == con.transport.hosts + + def test_create_connection_adds_our_serializer(self): + c = connections.Connections() + c.create_connection("testing", hosts=["opensearch.com"]) + + assert c.get_connection("testing").transport.serializer is serializer.serializer diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py new file mode 100644 index 00000000..db426fa5 --- /dev/null +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -0,0 +1,561 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + +import json +import re +import sys +import uuid +import warnings + +import pytest +from mock import Mock, patch +from requests.auth import AuthBase + +from opensearchpy.compat import reraise_exceptions +from opensearchpy.connection import Connection, RequestsHttpConnection +from opensearchpy.exceptions import ( + ConflictError, + NotFoundError, + RequestError, + TransportError, +) + +from ..test_cases import TestCase + +if sys.version_info > (3, 0): + from test_opensearchpy.TestHttpServer import TestHTTPServer + + +class TestRequestsHttpConnection(TestCase): + def _get_mock_connection( + self, connection_params={}, status_code=200, response_body=b"{}" + ): + con = RequestsHttpConnection(**connection_params) + + def _dummy_send(*args, **kwargs): + dummy_response = Mock() + dummy_response.headers = {} + dummy_response.status_code = status_code + dummy_response.content = response_body + dummy_response.request = args[0] + dummy_response.cookies = {} + _dummy_send.call_args = (args, kwargs) + return dummy_response + + con.session.send = _dummy_send + return con + + def _get_request(self, connection, *args, **kwargs): + if "body" in kwargs: + kwargs["body"] = kwargs["body"].encode("utf-8") + + status, headers, data = connection.perform_request(*args, **kwargs) + self.assertEqual(200, status) + self.assertEqual("{}", data) + + timeout = kwargs.pop("timeout", connection.timeout) + args, kwargs = connection.session.send.call_args + self.assertEqual(timeout, kwargs["timeout"]) + self.assertEqual(1, len(args)) + return args[0] + + def test_custom_http_auth_is_allowed(self): + auth = AuthBase() + c = RequestsHttpConnection(http_auth=auth) + + self.assertEqual(auth, c.session.auth) + + def test_timeout_set(self): + con = RequestsHttpConnection(timeout=42) + self.assertEqual(42, con.timeout) + + def test_opaque_id(self): + con = RequestsHttpConnection(opaque_id="app-1") + self.assertEqual(con.headers["x-opaque-id"], "app-1") + + def test_no_http_compression(self): + con = self._get_mock_connection() + + self.assertFalse(con.http_compress) + self.assertNotIn("content-encoding", con.session.headers) + + con.perform_request("GET", "/") + + req = con.session.send.call_args[0][0] + self.assertNotIn("content-encoding", req.headers) + self.assertNotIn("accept-encoding", req.headers) + + def test_http_compression(self): + con = self._get_mock_connection( + {"http_compress": True}, + ) + + self.assertTrue(con.http_compress) + + # 'content-encoding' shouldn't be set at a session level. + # Should be applied only if the request is sent with a body. 
+ self.assertNotIn("content-encoding", con.session.headers) + + con.perform_request("GET", "/", body=b"{}") + + req = con.session.send.call_args[0][0] + self.assertEqual(req.headers["content-encoding"], "gzip") + self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") + + con.perform_request("GET", "/") + + req = con.session.send.call_args[0][0] + self.assertNotIn("content-encoding", req.headers) + self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") + + def test_uses_https_if_verify_certs_is_off(self): + with warnings.catch_warnings(record=True) as w: + con = self._get_mock_connection( + {"use_ssl": True, "url_prefix": "url", "verify_certs": False} + ) + self.assertEqual(1, len(w)) + self.assertEqual( + "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", + str(w[0].message), + ) + + request = self._get_request(con, "GET", "/") + + self.assertEqual("https://localhost:9200/url/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_uses_given_ca_certs(self): + path = "/path/to/my/ca_certs.pem" + c = RequestsHttpConnection(ca_certs=path) + self.assertEqual(path, c.session.verify) + + def test_uses_default_ca_certs(self): + c = RequestsHttpConnection() + self.assertEqual(Connection.default_ca_certs(), c.session.verify) + + def test_uses_no_ca_certs(self): + c = RequestsHttpConnection(verify_certs=False) + self.assertFalse(c.session.verify) + + def test_nowarn_when_uses_https_if_verify_certs_is_off(self): + with warnings.catch_warnings(record=True) as w: + con = self._get_mock_connection( + { + "use_ssl": True, + "url_prefix": "url", + "verify_certs": False, + "ssl_show_warn": False, + } + ) + self.assertEqual(0, len(w)) + + request = self._get_request(con, "GET", "/") + + self.assertEqual("https://localhost:9200/url/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_merge_headers(self): + con = self._get_mock_connection( + connection_params={"headers": {"h1": "v1", "h2": "v2"}} + ) + req = self._get_request(con, "GET", "/", headers={"h2": "v2p", "h3": "v3"}) + self.assertEqual(req.headers["h1"], "v1") + self.assertEqual(req.headers["h2"], "v2p") + self.assertEqual(req.headers["h3"], "v3") + + def test_default_headers(self): + con = self._get_mock_connection() + req = self._get_request(con, "GET", "/") + self.assertEqual(req.headers["content-type"], "application/json") + self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) + + def test_custom_headers(self): + con = self._get_mock_connection() + req = self._get_request( + con, + "GET", + "/", + headers={ + "content-type": "application/x-ndjson", + "user-agent": "custom-agent/1.2.3", + }, + ) + self.assertEqual(req.headers["content-type"], "application/x-ndjson") + self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") + + def test_http_auth(self): + con = RequestsHttpConnection(http_auth="username:secret") + self.assertEqual(("username", "secret"), con.session.auth) + + def test_http_auth_tuple(self): + con = RequestsHttpConnection(http_auth=("username", "secret")) + self.assertEqual(("username", "secret"), con.session.auth) + + def test_http_auth_list(self): + con = RequestsHttpConnection(http_auth=["username", "secret"]) + self.assertEqual(("username", "secret"), con.session.auth) + + def test_repr(self): + con = self._get_mock_connection({"host": "opensearchpy.com", "port": 443}) + self.assertEqual( + "", repr(con) + ) + + def 
test_conflict_error_is_returned_on_409(self): + con = self._get_mock_connection(status_code=409) + self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") + + def test_not_found_error_is_returned_on_404(self): + con = self._get_mock_connection(status_code=404) + self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") + + def test_request_error_is_returned_on_400(self): + con = self._get_mock_connection(status_code=400) + self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") + + @patch("opensearchpy.connection.base.logger") + def test_head_with_404_doesnt_get_logged(self, logger): + con = self._get_mock_connection(status_code=404) + self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") + self.assertEqual(0, logger.warning.call_count) + + @patch("opensearchpy.connection.base.tracer") + @patch("opensearchpy.connection.base.logger") + def test_failed_request_logs_and_traces(self, logger, tracer): + con = self._get_mock_connection( + response_body=b'{"answer": 42}', status_code=500 + ) + self.assertRaises( + TransportError, + con.perform_request, + "GET", + "/", + {"param": 42}, + "{}".encode("utf-8"), + ) + + # trace request + self.assertEqual(1, tracer.info.call_count) + # trace response + self.assertEqual(1, tracer.debug.call_count) + # log url and duration + self.assertEqual(1, logger.warning.call_count) + self.assertTrue( + re.match( + r"^GET http://localhost:9200/\?param=42 \[status:500 request:0.[0-9]{3}s\]", + logger.warning.call_args[0][0] % logger.warning.call_args[0][1:], + ) + ) + + @patch("opensearchpy.connection.base.tracer") + @patch("opensearchpy.connection.base.logger") + def test_success_logs_and_traces(self, logger, tracer): + con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") + status, headers, data = con.perform_request( + "GET", + "/", + {"param": 42}, + """{"question": "what's that?"}""".encode("utf-8"), + ) + + # trace request + self.assertEqual(1, tracer.info.call_count) + self.assertEqual( + """curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' -d '{\n "question": "what\\u0027s that?"\n}'""", + tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], + ) + # trace response + self.assertEqual(1, tracer.debug.call_count) + self.assertTrue( + re.match( + r'#\[200\] \(0.[0-9]{3}s\)\n#{\n# "answer": "that\\u0027s it!"\n#}', + tracer.debug.call_args[0][0] % tracer.debug.call_args[0][1:], + ) + ) + + # log url and duration + self.assertEqual(1, logger.info.call_count) + self.assertTrue( + re.match( + r"GET http://localhost:9200/\?param=42 \[status:200 request:0.[0-9]{3}s\]", + logger.info.call_args[0][0] % logger.info.call_args[0][1:], + ) + ) + # log request body and response + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + self.assertEqual('> {"question": "what\'s that?"}', req[0][0] % req[0][1:]) + self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) + + @patch("opensearchpy.connection.base.logger") + def test_uncompressed_body_logged(self, logger): + con = self._get_mock_connection(connection_params={"http_compress": True}) + con.perform_request("GET", "/", body=b'{"example": "body"}') + + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) + self.assertEqual("< {}", resp[0][0] % resp[0][1:]) + + con = self._get_mock_connection( + connection_params={"http_compress":
True}, + status_code=500, + response_body=b'{"hello":"world"}', + ) + with pytest.raises(TransportError): + con.perform_request("GET", "/", body=b'{"example": "body2"}') + + self.assertEqual(4, logger.debug.call_count) + _, _, req, resp = logger.debug.call_args_list + self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) + self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) + + def test_defaults(self): + con = self._get_mock_connection() + request = self._get_request(con, "GET", "/") + + self.assertEqual("http://localhost:9200/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_params_properly_encoded(self): + con = self._get_mock_connection() + request = self._get_request( + con, "GET", "/", params={"param": "value with spaces"} + ) + + self.assertEqual("http://localhost:9200/?param=value+with+spaces", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_body_attached(self): + con = self._get_mock_connection() + request = self._get_request(con, "GET", "/", body='{"answer": 42}') + + self.assertEqual("http://localhost:9200/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) + + def test_http_auth_attached(self): + con = self._get_mock_connection({"http_auth": "username:secret"}) + request = self._get_request(con, "GET", "/") + + self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") + + @patch("opensearchpy.connection.base.tracer") + def test_url_prefix(self, tracer): + con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) + request = self._get_request( + con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 + ) + + self.assertEqual("http://localhost:9200/some-prefix/_search", request.url) + self.assertEqual("GET", request.method) + self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) + + # trace request + self.assertEqual(1, tracer.info.call_count) + self.assertEqual( + "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/_search?pretty' -d '{\n \"answer\": 42\n}'", + tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], + ) + + def test_surrogatepass_into_bytes(self): + buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" + con = self._get_mock_connection(response_body=buf) + status, headers, data = con.perform_request("GET", "/") + self.assertEqual(u"你好\uda6a", data) # fmt: skip + + @pytest.mark.skipif( + not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" + ) + def test_recursion_error_reraised(self): + conn = RequestsHttpConnection() + + def send_raise(*_, **__): + raise RecursionError("Wasn't modified!") + + conn.session.send = send_raise + + with pytest.raises(RecursionError) as e: + conn.perform_request("GET", "/") + assert str(e.value) == "Wasn't modified!" 
+ + def mock_session(self): + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + del dummy_session.get_frozen_credentials + + return dummy_session + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" + ) + def test_aws_signer_as_http_auth(self): + region = "us-west-2" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + auth = RequestsAWSV4SignerAuth(self.mock_session(), region) + con = RequestsHttpConnection(http_auth=auth) + prepared_request = requests.Request("GET", "http://localhost").prepare() + auth(prepared_request) + self.assertEqual(auth, con.session.auth) + self.assertIn("Authorization", prepared_request.headers) + self.assertIn("X-Amz-Date", prepared_request.headers) + self.assertIn("X-Amz-Security-Token", prepared_request.headers) + self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" + ) + def test_aws_signer_when_service_is_specified(self): + region = "us-west-1" + service = "aoss" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + auth = RequestsAWSV4SignerAuth(self.mock_session(), region, service) + con = RequestsHttpConnection(http_auth=auth) + prepared_request = requests.Request("GET", "http://localhost").prepare() + auth(prepared_request) + self.assertEqual(auth, con.session.auth) + self.assertIn("Authorization", prepared_request.headers) + self.assertIn("X-Amz-Date", prepared_request.headers) + self.assertIn("X-Amz-Security-Token", prepared_request.headers) + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" + ) + @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") + def test_aws_signer_signs_with_query_string(self, mock_sign): + region = "us-west-1" + service = "aoss" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + auth = RequestsAWSV4SignerAuth(self.mock_session(), region, service) + prepared_request = requests.Request( + "GET", "http://localhost", params={"key1": "value1", "key2": "value2"} + ).prepare() + auth(prepared_request) + self.assertEqual(mock_sign.call_count, 1) + self.assertEqual( + mock_sign.call_args[0], + ("GET", "http://localhost/?key1=value1&key2=value2", None), + ) + + +@pytest.mark.skipif( + sys.version_info < (3, 0), + reason="http_server is only available from python 3.x", +) +class TestRequestsConnectionRedirect: + @classmethod + def setup_class(cls): + # Start servers + cls.server1 = TestHTTPServer(port=8080) + cls.server1.start() + cls.server2 = TestHTTPServer(port=8090) + cls.server2.start() + + @classmethod + def teardown_class(cls): + # Stop servers + cls.server2.stop() + cls.server1.stop() + + # allow_redirects = False + def test_redirect_failure_when_allow_redirect_false(self): + conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) + with pytest.raises(TransportError) as e: + conn.perform_request("GET", "/redirect", allow_redirects=False) + assert e.value.status_code == 302 + + # allow_redirects = True (Default) + def test_redirect_success_when_allow_redirect_true(self): + conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) + user_agent = 
conn._get_default_user_agent() + status, headers, data = conn.perform_request("GET", "/redirect") + assert status == 200 + data = json.loads(data) + assert data["headers"] == { + "Host": "localhost:8090", + "Accept-Encoding": "identity", + "User-Agent": user_agent, + } + + +class TestSignerWithFrozenCredentials(TestRequestsHttpConnection): + def mock_session(self): + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + dummy_session.get_frozen_credentials = Mock(return_value=dummy_session) + + return dummy_session + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" + ) + def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth(self): + region = "us-west-2" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + mock_session = self.mock_session() + + auth = RequestsAWSV4SignerAuth(mock_session, region) + con = RequestsHttpConnection(http_auth=auth) + prepared_request = requests.Request("GET", "http://localhost").prepare() + auth(prepared_request) + self.assertEqual(auth, con.session.auth) + self.assertIn("Authorization", prepared_request.headers) + self.assertIn("X-Amz-Date", prepared_request.headers) + self.assertIn("X-Amz-Security-Token", prepared_request.headers) + self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) + mock_session.get_frozen_credentials.assert_called_once() diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py new file mode 100644 index 00000000..854e22e4 --- /dev/null +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -0,0 +1,406 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
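The new module below exercises `Urllib3HttpConnection`, the library's default connection class. The behaviors under test (gzip request compression, SSL contexts, default headers) correspond to client configuration along these lines; a minimal sketch with illustrative host and flags:

```python
from opensearchpy import OpenSearch
from opensearchpy.connection import Urllib3HttpConnection

# Urllib3HttpConnection is the default; it is named explicitly here only to
# mirror what the tests exercise. http_compress=True gzips request bodies,
# as test_http_compression verifies.
client = OpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    connection_class=Urllib3HttpConnection,
    http_compress=True,
    use_ssl=False,
)
print(client.info())
```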
+ + +import ssl +import sys +import uuid +import warnings +from gzip import GzipFile +from io import BytesIO +from platform import python_version + +import pytest +import urllib3 +from mock import Mock, patch +from urllib3._collections import HTTPHeaderDict + +from opensearchpy import __versionstr__ +from opensearchpy.compat import reraise_exceptions +from opensearchpy.connection import Connection, Urllib3HttpConnection + +from ..test_cases import SkipTest, TestCase + + +class TestUrllib3HttpConnection(TestCase): + def _get_mock_connection(self, connection_params={}, response_body=b"{}"): + con = Urllib3HttpConnection(**connection_params) + + def _dummy_urlopen(*args, **kwargs): + dummy_response = Mock() + dummy_response.headers = HTTPHeaderDict({}) + dummy_response.status = 200 + dummy_response.data = response_body + _dummy_urlopen.call_args = (args, kwargs) + return dummy_response + + con.pool.urlopen = _dummy_urlopen + return con + + def test_ssl_context(self): + try: + context = ssl.create_default_context() + except AttributeError: + # if create_default_context raises an AttributeError Exception + # it means SSLContext is not available for that version of python + # and we should skip this test. + raise SkipTest( + "Test test_ssl_context is skipped cause SSLContext is not available for this version of python" + ) + + con = Urllib3HttpConnection(use_ssl=True, ssl_context=context) + self.assertEqual(len(con.pool.conn_kw.keys()), 1) + self.assertIsInstance(con.pool.conn_kw["ssl_context"], ssl.SSLContext) + self.assertTrue(con.use_ssl) + + def test_opaque_id(self): + con = Urllib3HttpConnection(opaque_id="app-1") + self.assertEqual(con.headers["x-opaque-id"], "app-1") + + def test_no_http_compression(self): + con = self._get_mock_connection() + self.assertFalse(con.http_compress) + self.assertNotIn("accept-encoding", con.headers) + + con.perform_request("GET", "/") + + (_, _, req_body), kwargs = con.pool.urlopen.call_args + + self.assertFalse(req_body) + self.assertNotIn("accept-encoding", kwargs["headers"]) + self.assertNotIn("content-encoding", kwargs["headers"]) + + def test_http_compression(self): + con = self._get_mock_connection({"http_compress": True}) + self.assertTrue(con.http_compress) + self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") + + # 'content-encoding' shouldn't be set at a connection level. + # Should be applied only if the request is sent with a body. 
+ self.assertNotIn("content-encoding", con.headers) + + con.perform_request("GET", "/", body=b"{}") + + (_, _, req_body), kwargs = con.pool.urlopen.call_args + + buf = GzipFile(fileobj=BytesIO(req_body), mode="rb") + + self.assertEqual(buf.read(), b"{}") + self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") + self.assertEqual(kwargs["headers"]["content-encoding"], "gzip") + + con.perform_request("GET", "/") + + (_, _, req_body), kwargs = con.pool.urlopen.call_args + + self.assertFalse(req_body) + self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") + self.assertNotIn("content-encoding", kwargs["headers"]) + + def test_default_user_agent(self): + con = Urllib3HttpConnection() + self.assertEqual( + con._get_default_user_agent(), + "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()), + ) + + def test_timeout_set(self): + con = Urllib3HttpConnection(timeout=42) + self.assertEqual(42, con.timeout) + + def test_keep_alive_is_on_by_default(self): + con = Urllib3HttpConnection() + self.assertEqual( + { + "connection": "keep-alive", + "content-type": "application/json", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + def test_http_auth(self): + con = Urllib3HttpConnection(http_auth="username:secret") + self.assertEqual( + { + "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", + "connection": "keep-alive", + "content-type": "application/json", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + def test_http_auth_tuple(self): + con = Urllib3HttpConnection(http_auth=("username", "secret")) + self.assertEqual( + { + "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", + "content-type": "application/json", + "connection": "keep-alive", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + def test_http_auth_list(self): + con = Urllib3HttpConnection(http_auth=["username", "secret"]) + self.assertEqual( + { + "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", + "content-type": "application/json", + "connection": "keep-alive", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" + ) + @patch( + "urllib3.HTTPConnectionPool.urlopen", + return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), + ) + def test_aws_signer_as_http_auth_adds_headers(self, mock_open): + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") + con = Urllib3HttpConnection(http_auth=auth, headers={"x": "y"}) + con.perform_request("GET", "/") + self.assertEqual(mock_open.call_count, 1) + headers = mock_open.call_args[1]["headers"] + self.assertEqual(headers["x"], "y") + self.assertTrue( + headers["Authorization"].startswith("AWS4-HMAC-SHA256 Credential=") + ) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + self.assertIn("X-Amz-Content-SHA256", headers) + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" + ) + def test_aws_signer_as_http_auth(self): + region = "us-west-2" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + auth = Urllib3AWSV4SignerAuth(self.mock_session(), region) + headers = auth("GET", "http://localhost", None) + self.assertIn("Authorization", headers) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + self.assertIn("X-Amz-Content-SHA256", headers) 
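The signer's callable interface checked above can also be tried standalone. A minimal sketch, assuming `botocore` is installed (the signer delegates to it) and using a hypothetical static credentials object shaped like the mocked session in these tests:

```python
from types import SimpleNamespace

from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth

# Hypothetical placeholder credentials; a real botocore Credentials object
# exposes the same access_key / secret_key / token attributes.
creds = SimpleNamespace(
    access_key="AKIDEXAMPLE",
    secret_key="SECRETEXAMPLE",
    token="TOKENEXAMPLE",
)

# The service argument is optional; the tests also pass "aoss" explicitly,
# as in test_aws_signer_when_service_is_specified.
auth = Urllib3AWSV4SignerAuth(creds, "us-west-2")
headers = auth("GET", "https://localhost:9200/_cluster/health", None)

for name in ("Authorization", "X-Amz-Date", "X-Amz-Security-Token"):
    print(name, "=>", headers[name])
```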
+ + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" + ) + def test_aws_signer_when_region_is_null(self): + session = self.mock_session() + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth(session, None) + assert str(e.value) == "Region cannot be empty" + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth(session, "") + assert str(e.value) == "Region cannot be empty" + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" + ) + def test_aws_signer_when_credentials_is_null(self): + region = "us-west-1" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth(None, region) + assert str(e.value) == "Credentials cannot be empty" + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth("", region) + assert str(e.value) == "Credentials cannot be empty" + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" + ) + def test_aws_signer_when_service_is_specified(self): + region = "us-west-1" + service = "aoss" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + auth = Urllib3AWSV4SignerAuth(self.mock_session(), region, service) + headers = auth("GET", "http://localhost", None) + self.assertIn("Authorization", headers) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + + def mock_session(self): + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + del dummy_session.get_frozen_credentials + + return dummy_session + + def test_uses_https_if_verify_certs_is_off(self): + with warnings.catch_warnings(record=True) as w: + con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) + self.assertEqual(1, len(w)) + self.assertEqual( + "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", + str(w[0].message), + ) + + self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) + + def test_nowarn_when_uses_https_if_verify_certs_is_off(self): + with warnings.catch_warnings(record=True) as w: + con = Urllib3HttpConnection( + use_ssl=True, verify_certs=False, ssl_show_warn=False + ) + self.assertEqual(0, len(w)) + + self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) + + def test_doesnt_use_https_if_not_specified(self): + con = Urllib3HttpConnection() + self.assertIsInstance(con.pool, urllib3.HTTPConnectionPool) + + def test_no_warning_when_using_ssl_context(self): + ctx = ssl.create_default_context() + with warnings.catch_warnings(record=True) as w: + Urllib3HttpConnection(ssl_context=ctx) + self.assertEqual(0, len(w)) + + def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): + for kwargs in ( + {"ssl_show_warn": False}, + {"ssl_show_warn": True}, + {"verify_certs": True}, + {"verify_certs": False}, + {"ca_certs": "/path/to/certs"}, + {"ssl_show_warn": True, "ca_certs": "/path/to/certs"}, + ): + kwargs["ssl_context"] = ssl.create_default_context() + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + + Urllib3HttpConnection(**kwargs) + + self.assertEqual(1, len(w)) + self.assertEqual( + "When using `ssl_context`, all other SSL related kwargs are ignored", + 
str(w[0].message), + ) + + def test_uses_given_ca_certs(self): + path = "/path/to/my/ca_certs.pem" + c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) + self.assertEqual(path, c.pool.ca_certs) + + def test_uses_default_ca_certs(self): + c = Urllib3HttpConnection(use_ssl=True) + self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) + + def test_uses_no_ca_certs(self): + c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) + self.assertIsNone(c.pool.ca_certs) + + @patch("opensearchpy.connection.base.logger") + def test_uncompressed_body_logged(self, logger): + con = self._get_mock_connection(connection_params={"http_compress": True}) + con.perform_request("GET", "/", body=b'{"example": "body"}') + + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + + self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) + self.assertEqual("< {}", resp[0][0] % resp[0][1:]) + + def test_surrogatepass_into_bytes(self): + buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" + con = self._get_mock_connection(response_body=buf) + status, headers, data = con.perform_request("GET", "/") + self.assertEqual(u"你好\uda6a", data) # fmt: skip + + @pytest.mark.skipif( + not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" + ) + def test_recursion_error_reraised(self): + conn = Urllib3HttpConnection() + + def urlopen_raise(*_, **__): + raise RecursionError("Wasn't modified!") + + conn.pool.urlopen = urlopen_raise + + with pytest.raises(RecursionError) as e: + conn.perform_request("GET", "/") + assert str(e.value) == "Wasn't modified!" + + +class TestSignerWithFrozenCredentials(TestUrllib3HttpConnection): + def mock_session(self): + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + dummy_session.get_frozen_credentials = Mock(return_value=dummy_session) + + return dummy_session + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" + ) + def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth(self): + region = "us-west-2" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + mock_session = self.mock_session() + + auth = Urllib3AWSV4SignerAuth(mock_session, region) + headers = auth("GET", "http://localhost", None) + self.assertIn("Authorization", headers) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + self.assertIn("X-Amz-Content-SHA256", headers) + mock_session.get_frozen_credentials.assert_called_once() From 2b164de1041ac2910fc32a1fbeff74dfb0599d21 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Tue, 24 Oct 2023 10:57:21 -0400 Subject: [PATCH 31/80] Remove support for Python 2.x. 
(#548) Signed-off-by: dblock Signed-off-by: roma2023 --- .ci/test-matrix.yml | 3 -- CHANGELOG.md | 1 + dev-requirements.txt | 11 ++++--- noxfile.py | 2 +- opensearchpy/__init__.py | 29 +++++++------------ opensearchpy/__init__.pyi | 19 ++++-------- opensearchpy/connection/__init__.py | 17 ++--------- opensearchpy/helpers/__init__.py | 24 +++++++-------- opensearchpy/helpers/__init__.pyi | 22 +++++--------- setup.py | 15 ++-------- test_opensearchpy/run_tests.py | 25 +++++----------- test_opensearchpy/test_async/test_signer.py | 13 --------- .../test_requests_http_connection.py | 25 +--------------- .../test_urllib3_http_connection.py | 23 --------------- test_opensearchpy/test_helpers/test_field.py | 2 -- .../test_server/test_rest_api_spec.py | 4 +-- 16 files changed, 56 insertions(+), 179 deletions(-) diff --git a/.ci/test-matrix.yml b/.ci/test-matrix.yml index c76a1bff..d63b5793 100755 --- a/.ci/test-matrix.yml +++ b/.ci/test-matrix.yml @@ -2,9 +2,6 @@ TEST_SUITE: - oss PYTHON_VERSION: - - "2.7" - - "3.4" - - "3.5" - "3.6" - "3.7" - "3.8" diff --git a/CHANGELOG.md b/CHANGELOG.md index 843abeac..eaae9670 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed +- Removed leftover support for Python 2.7 ([#548](https://github.com/opensearch-project/opensearch-py/pull/548)) ### Fixed ### Security ### Dependencies diff --git a/dev-requirements.txt b/dev-requirements.txt index 76e1acdb..04cfb3e8 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -12,14 +12,13 @@ pytz numpy; python_version<"3.10" pandas; python_version<"3.10" -pyyaml>=5.4; python_version>="3.6" -pyyaml==5.3.1; python_version<"3.6" +pyyaml>=5.4 isort -black; python_version>="3.6" +black twine # Requirements for testing [async] extra -aiohttp; python_version>="3.6" -pytest-asyncio<=0.21.1; python_version>="3.6" -unasync; python_version>="3.6" +aiohttp +pytest-asyncio<=0.21.1 +unasync diff --git a/noxfile.py b/noxfile.py index 80b4e400..6b734b48 100644 --- a/noxfile.py +++ b/noxfile.py @@ -36,7 +36,7 @@ ) -@nox.session(python=["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) +@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) def test(session): session.install(".") session.install("-r", "dev-requirements.txt") diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index 6f26bc53..a0ea9f60 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -248,22 +248,15 @@ "tokenizer", ] -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError +from ._async.client import AsyncOpenSearch +from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection +from ._async.transport import AsyncTransport +from .connection import AsyncHttpConnection - from ._async.client import AsyncOpenSearch - from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection - from ._async.transport import AsyncTransport - from .connection import AsyncHttpConnection - - __all__ += [ - "AIOHttpConnection", - "AsyncConnection", - "AsyncTransport", - "AsyncOpenSearch", - "AsyncHttpConnection", - ] -except (ImportError, SyntaxError): - pass +__all__ += [ + "AIOHttpConnection", + "AsyncConnection", + "AsyncTransport", + 
"AsyncOpenSearch", + "AsyncHttpConnection", +] diff --git a/opensearchpy/__init__.pyi b/opensearchpy/__init__.pyi index 01fccaec..0fa4afcf 100644 --- a/opensearchpy/__init__.pyi +++ b/opensearchpy/__init__.pyi @@ -27,6 +27,10 @@ import sys from typing import Tuple +from ._async.client import AsyncOpenSearch as AsyncOpenSearch +from ._async.http_aiohttp import AIOHttpConnection as AIOHttpConnection +from ._async.http_aiohttp import AsyncConnection as AsyncConnection +from ._async.transport import AsyncTransport as AsyncTransport from .client import OpenSearch as OpenSearch from .connection import AsyncHttpConnection as AsyncHttpConnection from .connection import Connection as Connection @@ -54,6 +58,8 @@ from .exceptions import SSLError as SSLError from .exceptions import TransportError as TransportError from .exceptions import UnknownDslObject as UnknownDslObject from .exceptions import ValidationException as ValidationException +from .helpers import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth +from .helpers import AWSV4SignerAuth as AWSV4SignerAuth from .helpers.aggs import A as A from .helpers.analysis import Analyzer, CharFilter, Normalizer, TokenFilter, Tokenizer from .helpers.document import Document as Document @@ -120,19 +126,6 @@ from .helpers.wrappers import Range as Range from .serializer import JSONSerializer as JSONSerializer from .transport import Transport as Transport -try: - if sys.version_info < (3, 6): - raise ImportError - - from ._async.client import AsyncOpenSearch as AsyncOpenSearch - from ._async.http_aiohttp import AIOHttpConnection as AIOHttpConnection - from ._async.http_aiohttp import AsyncConnection as AsyncConnection - from ._async.transport import AsyncTransport as AsyncTransport - from .helpers import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth - from .helpers import AWSV4SignerAuth as AWSV4SignerAuth -except (ImportError, SyntaxError): - pass - VERSION: Tuple[int, int, int] __version__: Tuple[int, int, int] __versionstr__: str diff --git a/opensearchpy/connection/__init__.py b/opensearchpy/connection/__init__.py index 1b9ad2cd..6e331a54 100644 --- a/opensearchpy/connection/__init__.py +++ b/opensearchpy/connection/__init__.py @@ -25,9 +25,8 @@ # under the License. -import sys - from .base import Connection +from .http_async import AsyncHttpConnection from .http_requests import RequestsHttpConnection from .http_urllib3 import Urllib3HttpConnection, create_ssl_context @@ -36,17 +35,5 @@ "RequestsHttpConnection", "Urllib3HttpConnection", "create_ssl_context", + "AsyncHttpConnection", ] - -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - - from .http_async import AsyncHttpConnection - - __all__ += [ - "AsyncHttpConnection", - ] -except (ImportError, SyntaxError): - pass diff --git a/opensearchpy/helpers/__init__.py b/opensearchpy/helpers/__init__.py index 80dbf8bf..8057de7e 100644 --- a/opensearchpy/helpers/__init__.py +++ b/opensearchpy/helpers/__init__.py @@ -25,8 +25,12 @@ # under the License. 
-import sys - +from .._async.helpers.actions import ( + async_bulk, + async_reindex, + async_scan, + async_streaming_bulk, +) from .actions import ( _chunk_actions, _process_bulk_chunk, @@ -56,16 +60,8 @@ "AWSV4SignerAsyncAuth", "RequestsAWSV4SignerAuth", "Urllib3AWSV4SignerAuth", + "async_scan", + "async_bulk", + "async_reindex", + "async_streaming_bulk", ] - - -# Asyncio only supported on Python 3.6+ -if sys.version_info >= (3, 6): - from .._async.helpers.actions import ( - async_bulk, - async_reindex, - async_scan, - async_streaming_bulk, - ) - - __all__ += ["async_scan", "async_bulk", "async_reindex", "async_streaming_bulk"] diff --git a/opensearchpy/helpers/__init__.pyi b/opensearchpy/helpers/__init__.pyi index a4711989..01d4973c 100644 --- a/opensearchpy/helpers/__init__.pyi +++ b/opensearchpy/helpers/__init__.pyi @@ -26,6 +26,10 @@ import sys +from .._async.helpers.actions import async_bulk as async_bulk +from .._async.helpers.actions import async_reindex as async_reindex +from .._async.helpers.actions import async_scan as async_scan +from .._async.helpers.actions import async_streaming_bulk as async_streaming_bulk from .actions import _chunk_actions as _chunk_actions from .actions import _process_bulk_chunk as _process_bulk_chunk from .actions import bulk as bulk @@ -34,20 +38,8 @@ from .actions import parallel_bulk as parallel_bulk from .actions import reindex as reindex from .actions import scan as scan from .actions import streaming_bulk as streaming_bulk +from .asyncsigner import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth from .errors import BulkIndexError as BulkIndexError from .errors import ScanError as ScanError - -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - - from .._async.helpers.actions import async_bulk as async_bulk - from .._async.helpers.actions import async_reindex as async_reindex - from .._async.helpers.actions import async_scan as async_scan - from .._async.helpers.actions import async_streaming_bulk as async_streaming_bulk - from .asyncsigner import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth - from .signer import AWSV4SignerAuth as AWSV4SignerAuth - from .signer import RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth -except (ImportError, SyntaxError): - pass +from .signer import AWSV4SignerAuth as AWSV4SignerAuth +from .signer import RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth diff --git a/setup.py b/setup.py index 8bde5f40..f4163840 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,6 @@ import re -import sys from os.path import abspath, dirname, join from setuptools import find_packages, setup @@ -54,8 +53,7 @@ "requests>=2.4.0, <3.0.0", "six", "python-dateutil", - # ipaddress is included in stdlib since python 3.3 - 'ipaddress; python_version<"3.3"', + "certifi>=2022.12.07", ] tests_require = [ "requests>=2.0.0, <3.0.0", @@ -65,11 +63,9 @@ "pytest>=3.0.0", "pytest-cov", "pytz", - "botocore;python_version>='3.6'", + "botocore", + "pytest-mock<4.0.0", ] -if sys.version_info >= (3, 6): - tests_require.append("pytest-mock<4.0.0") - install_requires.append("certifi>=2022.12.07") async_require = ["aiohttp>=3,<4"] @@ -103,11 +99,6 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming 
Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index e0461af7..ca9db82a 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -116,27 +116,19 @@ def run_all(argv=None): if test_pattern: argv.append("-k %s" % test_pattern) else: - ignores = [] - # Python 3.6+ is required for async - if sys.version_info < (3, 6): - ignores.append("test_opensearchpy/test_async/") - - ignores.extend( - [ - "test_opensearchpy/test_server/", - "test_opensearchpy/test_server_secured/", - "test_opensearchpy/test_async/test_server/", - "test_opensearchpy/test_async/test_server_secured/", - ] - ) + ignores = [ + "test_opensearchpy/test_server/", + "test_opensearchpy/test_server_secured/", + "test_opensearchpy/test_async/test_server/", + "test_opensearchpy/test_async/test_server_secured/", + ] # Jenkins/Github actions, only run server tests if environ.get("TEST_TYPE") == "server": test_dir = abspath(dirname(__file__)) if secured: argv.append(join(test_dir, "test_server_secured")) - if sys.version_info >= (3, 6): - argv.append(join(test_dir, "test_async/test_server_secured")) + argv.append(join(test_dir, "test_async/test_server_secured")) ignores.extend( [ "test_opensearchpy/test_server/", @@ -145,8 +137,7 @@ def run_all(argv=None): ) else: argv.append(join(test_dir, "test_server")) - if sys.version_info >= (3, 6): - argv.append(join(test_dir, "test_async/test_server")) + argv.append(join(test_dir, "test_async/test_server")) ignores.extend( [ "test_opensearchpy/test_server_secured/", diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 5b92c89d..84458c9e 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -8,7 +8,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
-import sys import uuid import pytest @@ -42,9 +41,6 @@ async def test_aws_signer_async_as_http_auth(self): assert "X-Amz-Date" in headers assert "X-Amz-Security-Token" in headers - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) async def test_aws_signer_async_when_region_is_null(self): session = self.mock_session() @@ -58,9 +54,6 @@ async def test_aws_signer_async_when_region_is_null(self): AWSV4SignerAsyncAuth(session, "") assert str(e.value) == "Region cannot be empty" - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) async def test_aws_signer_async_when_credentials_is_null(self): region = "us-west-1" @@ -70,9 +63,6 @@ async def test_aws_signer_async_when_credentials_is_null(self): AWSV4SignerAsyncAuth(None, region) assert str(e.value) == "Credentials cannot be empty" - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAsyncAuth requires python3.6+" - ) async def test_aws_signer_async_when_service_is_specified(self): region = "us-west-2" service = "aoss" @@ -100,9 +90,6 @@ def mock_session(self, disable_get_frozen=True): return dummy_session - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAsyncAuth requires python3.6+" - ) async def test_aws_signer_async_frozen_credentials_as_http_auth(self): region = "us-west-2" diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index db426fa5..c85d2efd 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -28,7 +28,6 @@ import json import re -import sys import uuid import warnings @@ -36,7 +35,6 @@ from mock import Mock, patch from requests.auth import AuthBase -from opensearchpy.compat import reraise_exceptions from opensearchpy.connection import Connection, RequestsHttpConnection from opensearchpy.exceptions import ( ConflictError, @@ -44,12 +42,10 @@ RequestError, TransportError, ) +from test_opensearchpy.TestHttpServer import TestHTTPServer from ..test_cases import TestCase -if sys.version_info > (3, 0): - from test_opensearchpy.TestHttpServer import TestHTTPServer - class TestRequestsHttpConnection(TestCase): def _get_mock_connection( @@ -393,9 +389,6 @@ def test_surrogatepass_into_bytes(self): status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) def test_recursion_error_reraised(self): conn = RequestsHttpConnection() @@ -420,9 +413,6 @@ def mock_session(self): return dummy_session - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" - ) def test_aws_signer_as_http_auth(self): region = "us-west-2" @@ -440,9 +430,6 @@ def test_aws_signer_as_http_auth(self): self.assertIn("X-Amz-Security-Token", prepared_request.headers) self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" - ) def test_aws_signer_when_service_is_specified(self): region = "us-west-1" service = "aoss" @@ -460,9 +447,6 @@ def test_aws_signer_when_service_is_specified(self): self.assertIn("X-Amz-Date", prepared_request.headers) self.assertIn("X-Amz-Security-Token", prepared_request.headers) - @pytest.mark.skipif( - 
sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" - ) @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") def test_aws_signer_signs_with_query_string(self, mock_sign): region = "us-west-1" @@ -484,10 +468,6 @@ def test_aws_signer_signs_with_query_string(self, mock_sign): ) -@pytest.mark.skipif( - sys.version_info < (3, 0), - reason="http_server is only available from python 3.x", -) class TestRequestsConnectionRedirect: @classmethod def setup_class(cls): @@ -537,9 +517,6 @@ def mock_session(self): return dummy_session - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="RequestsAWSV4SignerAuth requires python3.6+" - ) def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth(self): region = "us-west-2" diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index 854e22e4..929258fd 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -27,7 +27,6 @@ import ssl -import sys import uuid import warnings from gzip import GzipFile @@ -40,7 +39,6 @@ from urllib3._collections import HTTPHeaderDict from opensearchpy import __versionstr__ -from opensearchpy.compat import reraise_exceptions from opensearchpy.connection import Connection, Urllib3HttpConnection from ..test_cases import SkipTest, TestCase @@ -179,9 +177,6 @@ def test_http_auth_list(self): con.headers, ) - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" - ) @patch( "urllib3.HTTPConnectionPool.urlopen", return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), @@ -202,9 +197,6 @@ def test_aws_signer_as_http_auth_adds_headers(self, mock_open): self.assertIn("X-Amz-Security-Token", headers) self.assertIn("X-Amz-Content-SHA256", headers) - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" - ) def test_aws_signer_as_http_auth(self): region = "us-west-2" @@ -217,9 +209,6 @@ def test_aws_signer_as_http_auth(self): self.assertIn("X-Amz-Security-Token", headers) self.assertIn("X-Amz-Content-SHA256", headers) - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" - ) def test_aws_signer_when_region_is_null(self): session = self.mock_session() @@ -233,9 +222,6 @@ def test_aws_signer_when_region_is_null(self): Urllib3AWSV4SignerAuth(session, "") assert str(e.value) == "Region cannot be empty" - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" - ) def test_aws_signer_when_credentials_is_null(self): region = "us-west-1" @@ -249,9 +235,6 @@ def test_aws_signer_when_credentials_is_null(self): Urllib3AWSV4SignerAuth("", region) assert str(e.value) == "Credentials cannot be empty" - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" - ) def test_aws_signer_when_service_is_specified(self): region = "us-west-1" service = "aoss" @@ -358,9 +341,6 @@ def test_surrogatepass_into_bytes(self): status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) def test_recursion_error_reraised(self): conn = Urllib3HttpConnection() @@ -387,9 +367,6 @@ def mock_session(self): return dummy_session - 
@pytest.mark.skipif( - sys.version_info < (3, 6), reason="Urllib3AWSV4SignerAuth requires python3.6+" - ) def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth(self): region = "us-west-2" diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index 15b51c52..288eab3a 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -25,7 +25,6 @@ # under the License. import base64 -import sys from datetime import datetime from ipaddress import ip_address @@ -150,7 +149,6 @@ def test_scaled_float(): assert f.to_dict() == {"scaling_factor": 123, "type": "scaled_float"} -@pytest.mark.skipif(sys.version_info < (3, 6), reason="requires python3.6 or higher") def test_ipaddress(): f = field.Ip() assert f.deserialize("127.0.0.1") == ip_address("127.0.0.1") diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index b5d890ab..306993f2 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -33,7 +33,6 @@ import io import os import re -import sys import warnings import zipfile @@ -135,8 +134,7 @@ OPENSEARCH_VERSION = None RUN_ASYNC_REST_API_TESTS = ( - sys.version_info >= (3, 6) - and os.environ.get("PYTHON_CONNECTION_CLASS") == "RequestsHttpConnection" + os.environ.get("PYTHON_CONNECTION_CLASS") == "RequestsHttpConnection" ) FALSEY_VALUES = ("", None, False, 0, 0.0) From b0f1136429d7139d7951f42b52550769872600c4 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Wed, 25 Oct 2023 08:41:50 -0400 Subject: [PATCH 32/80] Remove PY2. (#550) Signed-off-by: dblock Signed-off-by: roma2023 --- opensearchpy/client/utils.py | 6 +-- opensearchpy/compat.py | 42 ++++++------------- opensearchpy/compat.pyi | 1 - test_opensearchpy/test_client/test_utils.py | 11 +---- .../test_connection/test_base_connection.py | 17 -------- 5 files changed, 15 insertions(+), 62 deletions(-) diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index a5e99b11..aaa07995 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -32,7 +32,7 @@ from datetime import date, datetime from functools import wraps -from ..compat import PY2, quote, string_types, to_bytes, to_str, unquote, urlparse +from ..compat import quote, string_types, to_bytes, to_str, unquote, urlparse # parts of URL to be omitted SKIP_IN_PATH = (None, "", b"", [], ()) @@ -107,9 +107,7 @@ def _escape(value): # encode strings to utf-8 if isinstance(value, string_types): - if PY2 and isinstance(value, unicode): # noqa: F821 - return value.encode("utf-8") - if not PY2 and isinstance(value, str): + if isinstance(value, str): return value.encode("utf-8") return str(value) diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index a5169050..4f74c740 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -25,41 +25,23 @@ # under the License. 
-import sys +from queue import Queue +from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse -PY2 = sys.version_info[0] == 2 +string_types = str, bytes +map = map -if PY2: - string_types = (basestring,) # noqa: F821 - from itertools import imap as map - from urllib import quote, quote_plus, unquote, urlencode - from Queue import Queue - from urlparse import urlparse +def to_str(x, encoding="ascii"): + if not isinstance(x, str): + return x.decode(encoding) + return x - def to_str(x, encoding="ascii"): - if not isinstance(x, str): - return x.encode(encoding) - return x - to_bytes = to_str - -else: - string_types = str, bytes - from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse - - map = map - from queue import Queue - - def to_str(x, encoding="ascii"): - if not isinstance(x, str): - return x.decode(encoding) - return x - - def to_bytes(x, encoding="ascii"): - if not isinstance(x, bytes): - return x.encode(encoding) - return x +def to_bytes(x, encoding="ascii"): + if not isinstance(x, bytes): + return x.encode(encoding) + return x try: diff --git a/opensearchpy/compat.pyi b/opensearchpy/compat.pyi index d3dc0a08..c9607668 100644 --- a/opensearchpy/compat.pyi +++ b/opensearchpy/compat.pyi @@ -27,7 +27,6 @@ import sys from typing import Callable, Tuple, Type, Union -PY2: bool string_types: Tuple[type, ...] to_str: Callable[[Union[str, bytes]], str] diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index 1a4b6809..888e988d 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -29,9 +29,8 @@ from __future__ import unicode_literals from opensearchpy.client.utils import _bulk_body, _escape, _make_path, query_params -from opensearchpy.compat import PY2 -from ..test_cases import SkipTest, TestCase +from ..test_cases import TestCase class TestQueryParams(TestCase): @@ -161,14 +160,6 @@ def test_handles_unicode(self): "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) ) - def test_handles_utf_encoded_string(self): - if not PY2: - raise SkipTest("Only relevant for py2") - id = "中文".encode("utf-8") - self.assertEqual( - "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) - ) - class TestEscape(TestCase): def test_handles_ascii(self): diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py index 63729206..2c0a3fef 100644 --- a/test_opensearchpy/test_connection/test_base_connection.py +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -28,13 +28,9 @@ import os import sys -import unittest import warnings -import six - from opensearchpy.connection import Connection -from opensearchpy.exceptions import NotFoundError from ..test_cases import TestCase @@ -92,19 +88,6 @@ def test_raises_warnings_when_folded(self): self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) - @unittest.skipIf(six.PY2, "not compatible with python2") - def test_raises_errors(self): - con = Connection() - with self.assertLogs("opensearch") as captured, self.assertRaises( - NotFoundError - ): - con._raise_error(404, "Not found", "application/json") - self.assertEqual(len(captured.output), 1) - - # NB: this should assertNoLogs() but that method is not available until python3.10 - with self.assertRaises(NotFoundError): - con._raise_error(404, "Not found", "text/plain; charset=UTF-8") - def test_ipv6_host_and_port(self): for kwargs, 
expected_host in [ ({"host": "::1"}, "http://[::1]:9200"), From 2ddac11b1909a321027f4bfb919987ad98899a77 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Thu, 26 Oct 2023 07:58:33 -0700 Subject: [PATCH 33/80] Generate opensearch-py client from API specs (#551) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/client/__init__.py | 1087 ++++++++-------- opensearchpy/_async/client/__init__.pyi | 35 +- opensearchpy/_async/client/cat.py | 276 ++-- opensearchpy/_async/client/cluster.py | 102 +- opensearchpy/_async/client/cluster.pyi | 1 + .../_async/client/dangling_indices.py | 8 +- opensearchpy/_async/client/indices.py | 1113 +++++++--------- opensearchpy/_async/client/indices.pyi | 232 +--- opensearchpy/_async/client/ingest.py | 14 +- opensearchpy/_async/client/nodes.py | 34 +- opensearchpy/_async/client/security.py | 219 +++- opensearchpy/_async/client/security.pyi | 713 ++++++++++- opensearchpy/_async/client/snapshot.py | 249 ++-- opensearchpy/_async/client/snapshot.pyi | 61 +- opensearchpy/_async/client/tasks.py | 11 +- opensearchpy/client/__init__.py | 1089 ++++++++-------- opensearchpy/client/__init__.pyi | 40 +- opensearchpy/client/_patch.pyi | 1 + opensearchpy/client/cat.py | 276 ++-- opensearchpy/client/cluster.py | 102 +- opensearchpy/client/cluster.pyi | 1 + opensearchpy/client/dangling_indices.py | 8 +- opensearchpy/client/indices.py | 1137 +++++++---------- opensearchpy/client/indices.pyi | 232 +--- opensearchpy/client/ingest.py | 14 +- opensearchpy/client/nodes.py | 34 +- opensearchpy/client/security.py | 221 +++- opensearchpy/client/security.pyi | 893 ++++++++++--- opensearchpy/client/snapshot.py | 249 ++-- opensearchpy/client/snapshot.pyi | 61 +- opensearchpy/client/tasks.py | 11 +- utils/generate-api.py | 20 +- utils/templates/base | 6 +- 34 files changed, 4540 insertions(+), 4011 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index eaae9670..9c55a5d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Generate `cluster` client from API specs ([#530](https://github.com/opensearch-project/opensearch-py/pull/530)) - Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) - Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) +- Use API generator for all APIs ([#551](https://github.com/opensearch-project/opensearch-py/pull/551)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 2440b291..7f0d4f98 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -25,6 +25,17 @@ # specific language governing permissions and limitations # under the License. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from __future__ import unicode_literals import logging @@ -274,25 +285,25 @@ async def create(self, index, id, body, params=None, headers=None): with a same ID already exists in the index. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The document :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id, body): if param in SKIP_IN_PATH: @@ -322,46 +333,42 @@ async def index(self, index, body, id=None, params=None, headers=None): Creates or updates a document in an index. - :arg index: The name of the index + :arg index: Index name. :arg body: The document - :arg id: Document ID - :arg if_primary_term: only perform the index operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the index operation if the last - operation that has changed the document has the specified sequence - number + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg op_type: Explicit operation type. Defaults to `index` for requests with an explicit document ID, and to `create`for requests - without an explicit document ID Valid choices: index, create + without an explicit document ID. Valid choices are index, create. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. 
:arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: When true, requires destination to be an - alias. Default is false - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + alias. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( "POST" if id in SKIP_IN_PATH else "PUT", - _make_path(index, doc_type, id), + _make_path(index, "_doc", id), params=params, headers=headers, body=body, @@ -385,29 +392,29 @@ async def bulk(self, body, index=None, params=None, headers=None): :arg body: The operation definition and data (action-data pairs), separated by newlines - :arg index: Default index for items which don't provide one + :arg index: Default index for items which don't provide one. :arg _source: True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub- - request + request. :arg _source_excludes: Default list of fields to exclude from - the returned _source field, can be overridden on each sub-request + the returned _source field, can be overridden on each sub-request. :arg _source_includes: Default list of fields to extract and - return from the _source field, can be overridden on each sub-request + return from the _source field, can be overridden on each sub-request. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: Sets require_alias for all incoming - documents. Defaults to unset (false) - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout + documents. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the bulk operation. 
Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -427,9 +434,9 @@ async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=Non Explicitly clears the search context for a scroll. - :arg body: A comma-separated list of scroll IDs to clear if none + :arg body: Comma-separated list of scroll IDs to clear if none was specified via the scroll_id parameter - :arg scroll_id: A comma-separated list of scroll IDs to clear + :arg scroll_id: Comma-separated list of scroll IDs to clear. """ if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("You need to supply scroll_id or body.") @@ -463,37 +470,38 @@ async def count(self, body=None, index=None, params=None, headers=None): Returns number of documents matching a query. - :arg body: A query to restrict the results specified with the + :arg body: Query to restrict the results specified with the Query DSL (optional) - :arg index: A comma-separated list of indices to restrict the - results + :arg index: Comma-separated list of indices to restrict the + results. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg min_score: Include only documents with a specific `_score` - value in the result + value in the result. 
:arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg routing: A comma-separated list of specific routing values - :arg terminate_after: The maximum count for each shard, upon - reaching which the query execution will terminate early + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg routing: Comma-separated list of specific routing values. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. """ return await self.transport.perform_request( "POST", @@ -518,37 +526,33 @@ async def delete(self, index, id, params=None, headers=None): Removes a document from the index. - :arg index: The name of the index - :arg id: The document ID - :arg if_primary_term: only perform the delete operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the delete operation if the last - operation that has changed the document has the specified sequence - number + :arg index: Index name. + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the delete operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( - "DELETE", _make_path(index, doc_type, id), params=params, headers=headers + "DELETE", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -591,76 +595,76 @@ async def delete_by_query(self, index, body, params=None, headers=None): Deletes documents matching the provided query. 
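A sketch of the `count` action documented above, assuming the `AsyncOpenSearch` `client` from the earlier bulk example; the query and index name are illustrative:

```python
# Count documents matching a Query DSL body; omit the body to count everything.
response = await client.count(
    index="movies",
    body={"query": {"range": {"year": {"gte": 1950}}}},
)
print(response["count"])
```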
- :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string - :arg conflicts: What to do when the delete by query hits version - conflicts? Valid choices: abort, proceed Default: abort + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg conflicts: What to do when the operation encounters version + conflicts?. Valid choices are abort, proceed. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg from_: Starting offset (default: 0) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg from_: Starting offset. Default is 0. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_docs: Maximum number of documents to process (default: - all documents) + all documents). :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg refresh: Should the effected indexes be refreshed? + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg refresh: Refresh the shard containing the document before + performing the operation. :arg request_cache: Specify if request cache should be used for - this request or not, defaults to index level setting + this request or not, defaults to index level setting. :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. - :arg routing: A comma-separated list of specific routing values + requests per second. -1 means no throttle. Default is 0. + :arg routing: Comma-separated list of specific routing values. 
:arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search - :arg scroll_size: Size on the scroll request powering the delete - by query Default: 100 + should be maintained for scrolled search. + :arg scroll_size: Size on the scroll request powering the + operation. Default is 100. :arg search_timeout: Explicit timeout for each search request. Defaults to no timeout. - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch - :arg size: Deprecated, please use `max_docs` instead + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. + :arg size: Deprecated, please use `max_docs` instead. :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default: 1 - :arg sort: A comma-separated list of : pairs + set to `auto`. Default is 1. + :arg sort: Comma-separated list of : pairs. :arg stats: Specific 'tag' of the request for logging and - statistical purposes + statistical purposes. :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default: 1m - :arg version: Specify whether to return document version as part - of a hit + shards that are unavailable. Default is 1m. + :arg version: Whether to return document version as part of a + hit. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the delete by query - operation. Defaults to 1, meaning the primary shard only. Set to `all` - for all shard copies, otherwise set to any non-negative value less than - or equal to the total number of copies for the shard (number of replicas - + 1) - :arg wait_for_completion: Should the request should block until - the delete by query is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -685,9 +689,9 @@ async def delete_by_query_rethrottle(self, task_id, params=None, headers=None): operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -699,16 +703,19 @@ async def delete_by_query_rethrottle(self, task_id, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def delete_script(self, id, params=None, headers=None): """ Deletes a script. 
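A sketch of `delete_by_query` as documented above (same assumed `client`); `conflicts="proceed"` exercises the version-conflict choice described in the hunk:

```python
# Delete every document matching the query, skipping version conflicts
# instead of aborting the whole operation.
response = await client.delete_by_query(
    index="movies",
    body={"query": {"match": {"title": "casablanca"}}},
    conflicts="proceed",
    refresh=True,
)
print(response["deleted"])
```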
- :arg id: Script ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -734,35 +741,33 @@ async def exists(self, index, id, params=None, headers=None): Returns information about whether a document exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( - "HEAD", _make_path(index, doc_type, id), params=params, headers=headers + "HEAD", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -781,24 +786,24 @@ async def exists_source(self, index, id, params=None, headers=None): Returns information about whether a document source exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. 
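The `exists` action above maps to an HTTP `HEAD` request, so it resolves to a boolean rather than a document body; a sketch with the assumed `client`:

```python
# True if the document is present, False otherwise; no source is fetched.
if await client.exists(index="movies", id="1"):
    print("document 1 is present")
```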
:arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -829,30 +834,30 @@ async def explain(self, index, id, body=None, params=None, headers=None): Returns information about why a specific matches (or doesn't match) a query. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The query definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg analyze_wildcard: Specify whether wildcards and prefix - queries in the query string query should be analyzed (default: false) - :arg analyzer: The analyzer for the query string query + queries in the query string query should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR - :arg df: The default field for query string query (default: - _all) + query (AND or OR). Valid choices are AND, OR. + :arg df: The default field for query string query. Default is + _all. :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -878,19 +883,19 @@ async def field_caps(self, body=None, index=None, params=None, headers=None): :arg body: An index filter specified with the Query DSL - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. 
(This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of field names + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fields: Comma-separated list of field names. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_unmapped: Indicates whether unmapped fields should - be included in the response. + be included in the response. Default is false. """ return await self.transport.perform_request( "POST", @@ -917,46 +922,47 @@ async def get(self, index, id, params=None, headers=None): Returns a document. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( - "GET", _make_path(index, doc_type, id), params=params, headers=headers + "GET", _make_path(index, "_doc", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") async def get_script(self, id, params=None, headers=None): """ Returns a script. - :arg id: Script ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
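A sketch of the `get` action documented above, trimming the returned `_source` with `_source_includes` (assumed `client`, hypothetical id):

```python
# Fetch one document and return only the listed source fields.
doc = await client.get(index="movies", id="1", _source_includes="title,year")
print(doc["_source"])
```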
""" if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -981,24 +987,24 @@ async def get_source(self, index, id, params=None, headers=None): Returns the source of a document. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -1026,24 +1032,24 @@ async def mget(self, body, index=None, params=None, headers=None): :arg body: Document identifiers; can be either `docs` - (containing full document information) or `ids` (when index and type is - provided in the URL. - :arg index: The name of the index + (containing full document information) or `ids` (when index is provided + in the URL. + :arg index: Index name. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1072,30 +1078,31 @@ async def msearch(self, body, index=None, params=None, headers=None): :arg body: The request definitions (metadata-search request definition pairs), separated by newlines - :arg index: A comma-separated list of index names to use as - default + :arg index: Comma-separated list of indices to use as default. 
:arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute + concurrent searches the multi search api will execute. :arg max_concurrent_shard_requests: The number of concurrent shard requests each sub search executes concurrently per node. This value should be used to limit the impact of the search on the cluster in - order to limit the number of concurrent shard requests Default: 5 - :arg pre_filter_shard_size: A threshold that enforces a pre- - filter roundtrip to prefilter search shards based on query rewriting if - the number of shards the search request expands to exceeds the - threshold. This filter roundtrip can limit the number of shards - significantly if for instance a shard can not match any documents based - on its rewrite method ie. if date filters are mandatory to match but the - shard bounds and the query are disjoint. + order to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1123,19 +1130,20 @@ async def msearch_template(self, body, index=None, params=None, headers=None): :arg body: The request definitions (metadata-search request definition pairs), separated by newlines - :arg index: A comma-separated list of index names to use as - default + :arg index: Comma-separated list of indices to use as default. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute + concurrent searches the multi search api will execute. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. 
:arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1175,34 +1183,34 @@ async def mtermvectors(self, body=None, index=None, params=None, headers=None): :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Applies to all returned documents unless otherwise specified - in body "params" or "docs". Default: True - :arg fields: A comma-separated list of fields to return. Applies - to all returned documents unless otherwise specified in body "params" or - "docs". - :arg ids: A comma-separated list of documents ids. You must - define ids as parameter or set "ids" or "docs" in the request body + in body 'params' or 'docs'. Default is True. + :arg fields: Comma-separated list of fields to return. Applies + to all returned documents unless otherwise specified in body 'params' or + 'docs'. + :arg ids: Comma-separated list of documents ids. You must define + ids as parameter or set 'ids' or 'docs' in the request body. :arg offsets: Specifies if term offsets should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg payloads: Specifies if term payloads should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg positions: Specifies if term positions should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg preference: Specify the node or shard the operation should - be performed on (default: random) .Applies to all returned documents - unless otherwise specified in body "params" or "docs". + be performed on. Applies to all returned documents unless otherwise + specified in body 'params' or 'docs'. Default is random. :arg realtime: Specifies if requests are real-time as opposed to - near-real-time (default: true). - :arg routing: Specific routing value. Applies to all returned - documents unless otherwise specified in body "params" or "docs". + near-real-time. Default is True. + :arg routing: Routing value. Applies to all returned documents + unless otherwise specified in body 'params' or 'docs'. :arg term_statistics: Specifies if total term frequency and document frequency should be returned. Applies to all returned documents - unless otherwise specified in body "params" or "docs". - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + unless otherwise specified in body 'params' or 'docs'. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. 
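A sketch of the `msearch` action described above: header/body pairs in one request, again relying on the client's newline-delimited serialization (assumed `client`):

```python
# Two searches in one round-trip; responses come back in the same order.
body = [
    {"index": "movies"},
    {"query": {"match": {"title": "godfather"}}},
    {"index": "movies"},
    {"query": {"match_all": {}}, "size": 1},
]
response = await client.msearch(body=body)
print(len(response["responses"]))  # 2
```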
""" path = _make_path(index, "_mtermvectors") @@ -1210,18 +1218,21 @@ async def mtermvectors(self, body=None, index=None, params=None, headers=None): "POST", path, params=params, headers=headers, body=body ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def put_script(self, id, body, context=None, params=None, headers=None): """ Creates or updates a script. - :arg id: Script ID + :arg id: Script ID. :arg body: The document - :arg context: Context name to compile script against - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg context: Script context. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (id, body): if param in SKIP_IN_PATH: @@ -1241,28 +1252,23 @@ async def put_script(self, id, body, context=None, params=None, headers=None): async def rank_eval(self, body, index=None, params=None, headers=None): """ Allows to evaluate the quality of ranked search results over a set of typical - search queries - + search queries. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version :arg body: The ranking evaluation search definition, including search requests, document ratings and ranking metric definition. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be ignored when unavailable (missing or closed). + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1295,24 +1301,24 @@ async def reindex(self, body, params=None, headers=None): :arg body: The search definition using the Query DSL and the prototype for the index request. :arg max_docs: Maximum number of documents to process (default: - all documents) - :arg refresh: Should the affected indexes be refreshed? - :arg requests_per_second: The throttle to set on this request in - sub-requests per second. -1 means no throttle. - :arg scroll: Control how long to keep the search context alive - Default: 5m + all documents). + :arg refresh: Should the affected indexes be refreshed?. 
+ :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. Default is 0. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default: 1 + set to `auto`. Default is 1. :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default: 1m + shards that are unavailable. Default is 1m. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the reindex operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) - :arg wait_for_completion: Should the request should block until - the reindex is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1327,9 +1333,9 @@ async def reindex_rethrottle(self, task_id, params=None, headers=None): Changes the number of requests per second for a particular Reindex operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1350,7 +1356,7 @@ async def render_search_template( :arg body: The search definition template and its params - :arg id: The id of the stored search template + :arg id: The id of the stored search template. """ return await self.transport.perform_request( "POST", @@ -1363,13 +1369,8 @@ async def render_search_template( @query_params() async def scripts_painless_execute(self, body=None, params=None, headers=None): """ - Allows an arbitrary script to be executed and a result to be returned - - - .. warning:: + Allows an arbitrary script to be executed and a result to be returned. - This API is **experimental** so may include breaking changes - or be removed in a future version :arg body: The script to execute """ @@ -1389,11 +1390,12 @@ async def scroll(self, body=None, scroll_id=None, params=None, headers=None): :arg body: The scroll ID if not passed by URL or query parameter. - :arg scroll_id: The scroll ID for scrolled search + :arg scroll_id: Scroll ID. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response + be rendered as an integer or an object in the rest search response. + Default is false. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search + should be maintained for scrolled search. 
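The `scroll` docstring above completes the paging loop started by `search`; a sketch of the full round-trip, including `clear_scroll` to release the server-side context (assumed `client`):

```python
# Open a scrolled search, page until exhausted, then free the context.
page = await client.search(
    index="movies", scroll="2m", size=100, body={"query": {"match_all": {}}}
)
scroll_id = page["_scroll_id"]
while page["hits"]["hits"]:
    # ... process page["hits"]["hits"] ...
    page = await client.scroll(body={"scroll_id": scroll_id, "scroll": "2m"})
    scroll_id = page["_scroll_id"]
await client.clear_scroll(body={"scroll_id": [scroll_id]})
```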
""" if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("You need to supply scroll_id or body.") @@ -1426,7 +1428,6 @@ async def scroll(self, body=None, scroll_id=None, params=None, headers=None): "ignore_unavailable", "lenient", "max_concurrent_shard_requests", - "min_compatible_shard_node", "pre_filter_shard_size", "preference", "q", @@ -1457,101 +1458,99 @@ async def search(self, body=None, index=None, params=None, headers=None): :arg body: The search definition using the Query DSL - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg allow_partial_search_results: Indicate if an error should - be returned if there is a partial search failure or timeout Default: - True + be returned if there is a partial search failure or timeout. Default is + True. :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg batched_reduce_size: The number of shard results that should be reduced at once on the coordinating node. This value should be used as a protection mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large. - Default: 512 + Default is 512. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string - :arg docvalue_fields: A comma-separated list of fields to return - as the docvalue representation of a field for each hit + given in the query string. + :arg docvalue_fields: Comma-separated list of fields to return + as the docvalue representation of a field for each hit. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit - :arg from_: Starting offset (default: 0) + about score computation as part of a hit. + :arg from_: Starting offset. Default is 0. 
:arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order - to limit the number of concurrent shard requests Default: 5 - :arg min_compatible_shard_node: The minimum compatible version - that all shards involved in search should have for this request to be - successful - :arg pre_filter_shard_size: A threshold that enforces a pre- - filter roundtrip to prefilter search shards based on query rewriting if - the number of shards the search request expands to exceeds the - threshold. This filter roundtrip can limit the number of shards - significantly if for instance a shard can not match any documents based - on its rewrite method ie. if date filters are mandatory to match but the - shard bounds and the query are disjoint. + to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. :arg request_cache: Specify if request cache should be used for - this request or not, defaults to index level setting + this request or not, defaults to index level setting. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg routing: A comma-separated list of specific routing values + be rendered as an integer or an object in the rest search response. + Default is false. + :arg routing: Comma-separated list of specific routing values. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be maintained for scrolled search. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. :arg seq_no_primary_term: Specify whether to return sequence - number and primary term of the last modification of each hit - :arg size: Number of hits to return (default: 10) - :arg sort: A comma-separated list of : pairs + number and primary term of the last modification of each hit. + :arg size: Number of hits to return. Default is 10. + :arg sort: Comma-separated list of : pairs. 
:arg stats: Specific 'tag' of the request for logging and - statistical purposes - :arg stored_fields: A comma-separated list of stored fields to - return as part of a hit - :arg suggest_field: Specify which field to use for suggestions - :arg suggest_mode: Specify suggest mode Valid choices: missing, - popular, always Default: missing - :arg suggest_size: How many suggestions to return in response + statistical purposes. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg suggest_field: Specify which field to use for suggestions. + :arg suggest_mode: Specify suggest mode. Valid choices are + missing, popular, always. + :arg suggest_size: How many suggestions to return in response. :arg suggest_text: The source text for which the suggestions - should be returned + should be returned. :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. :arg track_scores: Whether to calculate and return scores even - if they are not used for sorting + if they are not used for sorting. :arg track_total_hits: Indicate if the number of documents that - match the query should be tracked + match the query should be tracked. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response - :arg version: Specify whether to return document version as part - of a hit + should be prefixed by their respective types in the response. + :arg version: Whether to return document version as part of a + hit. """ # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -1579,21 +1578,21 @@ async def search_shards(self, index=None, params=None, headers=None): executed against. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + be performed on. Default is random. + :arg routing: Routing value. 
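A sketch tying together several of the `search` parameters documented above; note the `from_` spelling, since `from` is reserved in Python (assumed `client`):

```python
# Paged, sorted full-text search.
response = await client.search(
    index="movies",
    body={"query": {"match": {"title": "godfather"}}},
    from_=0,
    size=10,
    sort="year:desc",
)
for hit in response["hits"]["hits"]:
    print(hit["_score"], hit["_source"]["title"])
```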
""" return await self.transport.perform_request( "GET", _make_path(index, "_search_shards"), params=params, headers=headers @@ -1620,35 +1619,37 @@ async def search_template(self, body, index=None, params=None, headers=None): :arg body: The search definition template and its params - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit + about score computation as part of a hit. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg profile: Specify whether to profile the query execution + be performed on. Default is random. + :arg profile: Specify whether to profile the query execution. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg routing: A comma-separated list of specific routing values + be rendered as an integer or an object in the rest search response. + Default is false. + :arg routing: Comma-separated list of specific routing values. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be maintained for scrolled search. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1683,28 +1684,28 @@ async def termvectors(self, index, body=None, id=None, params=None, headers=None :arg index: The index in which the document resides. :arg body: Define parameters and or supply a document to get termvectors for. See documentation. - :arg id: The id of the document, when not specified a doc param - should be supplied. + :arg id: Document ID. When not specified a doc param should be + supplied. 
:arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be - returned. Default: True - :arg fields: A comma-separated list of fields to return. + returned. Default is True. + :arg fields: Comma-separated list of fields to return. :arg offsets: Specifies if term offsets should be returned. - Default: True + Default is True. :arg payloads: Specifies if term payloads should be returned. - Default: True + Default is True. :arg positions: Specifies if term positions should be returned. - Default: True + Default is True. :arg preference: Specify the node or shard the operation should - be performed on (default: random). + be performed on. Default is random. :arg realtime: Specifies if request is real-time as opposed to - near-real-time (default: true). - :arg routing: Specific routing value. + near-real-time. Default is True. + :arg routing: Routing value. :arg term_statistics: Specifies if total term frequency and - document frequency should be returned. - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + document frequency should be returned. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -1734,38 +1735,36 @@ async def update(self, index, id, body, params=None, headers=None): Updates a document with a script or partial document. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The request definition requires either `script` or partial `doc` :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field - :arg if_primary_term: only perform the update operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the update operation if the last - operation that has changed the document has the specified sequence - number - :arg lang: The script language (default: painless) + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. + :arg lang: The script language. Default is painless. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg require_alias: When true, requires destination is an alias. - Default is false + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg require_alias: When true, requires destination to be an + alias. Default is false. 
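A sketch of the `termvectors` action documented above, requesting term statistics for one stored document (assumed `client`, hypothetical id):

```python
# Term-level statistics for the `title` field of document 1.
response = await client.termvectors(
    index="movies", id="1", fields="title", term_statistics=True
)
print(sorted(response["term_vectors"]["title"]["terms"]))
```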
:arg retry_on_conflict: Specify how many times should the - operation be retried when a conflict occurs (default: 0) - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout + operation be retried when a conflict occurs. Default is 0. + :arg routing: Routing value. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the update operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id, body): if param in SKIP_IN_PATH: @@ -1810,7 +1809,6 @@ async def update(self, index, id, body, params=None, headers=None): "terminate_after", "timeout", "version", - "version_type", "wait_for_active_shards", "wait_for_completion", ) @@ -1820,80 +1818,77 @@ async def update_by_query(self, index, body=None, params=None, headers=None): for example to pick up a mapping change. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string - :arg conflicts: What to do when the update by query hits version - conflicts? Valid choices: abort, proceed Default: abort + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg conflicts: What to do when the operation encounters version + conflicts?. Valid choices are abort, proceed. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg from_: Starting offset (default: 0) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg from_: Starting offset. Default is 0. 
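A sketch of the partial-document form of `update` described above, with `retry_on_conflict` to absorb concurrent writers (assumed `client`):

```python
# Merge the partial `doc` into the stored document, retrying on version conflicts.
await client.update(
    index="movies",
    id="1",
    body={"doc": {"year": 1972}},
    retry_on_conflict=3,
)
```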
:arg ignore_unavailable: Whether specified concrete indices
-            should be ignored when unavailable (missing or closed)
+            should be ignored when unavailable (missing or closed).
         :arg lenient: Specify whether format-based query failures (such
-            as providing text to a numeric field) should be ignored
+            as providing text to a numeric field) should be ignored.
         :arg max_docs: Maximum number of documents to process (default:
-            all documents)
-        :arg pipeline: Ingest pipeline to set on index requests made by
-            this action. (default: none)
+            all documents).
+        :arg pipeline: The pipeline id to preprocess incoming documents
+            with.
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
-        :arg q: Query in the Lucene query string syntax
-        :arg refresh: Should the affected indexes be refreshed?
+            be performed on. Default is random.
+        :arg q: Query in the Lucene query string syntax.
+        :arg refresh: Should the affected indexes be refreshed?
         :arg request_cache: Specify if request cache should be used for
-            this request or not, defaults to index level setting
-        :arg requests_per_second: The throttle to set on this request in
-            sub-requests per second. -1 means no throttle.
-        :arg routing: A comma-separated list of specific routing values
+            this request or not, defaults to index level setting.
+        :arg requests_per_second: The throttle for this request in sub-
+            requests per second. -1 means no throttle. Default is 0.
+        :arg routing: Comma-separated list of specific routing values.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg scroll_size: Size on the scroll request powering the update
-            by query Default: 100
+            should be maintained for scrolled search.
+        :arg scroll_size: Size on the scroll request powering the
+            operation. Default is 100.
         :arg search_timeout: Explicit timeout for each search request.
             Defaults to no timeout.
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
-        :arg size: Deprecated, please use `max_docs` instead
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
+        :arg size: Deprecated, please use `max_docs` instead.
         :arg slices: The number of slices this task should be divided
            into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be
-            set to `auto`. Default: 1
-        :arg sort: A comma-separated list of <field>:<direction> pairs
+            set to `auto`. Default is 1.
+        :arg sort: Comma-separated list of <field>:<direction> pairs.
         :arg stats: Specific 'tag' of the request for logging and
-            statistical purposes
+            statistical purposes.
         :arg terminate_after: The maximum number of documents to collect
             for each shard, upon reaching which the query execution will terminate
             early.
         :arg timeout: Time each individual bulk request should wait for
-            shards that are unavailable. Default: 1m
-        :arg version: Specify whether to return document version as part
-            of a hit
-        :arg version_type: Should the document increment the version
-            number (internal) on hit or not (reindex)
+            shards that are unavailable. Default is 1m.
+        :arg version: Whether to return document version as part of a
+            hit.
         :arg wait_for_active_shards: Sets the number of shard copies
-            that must be active before proceeding with the update by query
-            operation. Defaults to 1, meaning the primary shard only.
Set to `all` - for all shard copies, otherwise set to any non-negative value less than - or equal to the total number of copies for the shard (number of replicas - + 1) - :arg wait_for_completion: Should the request should block until - the update by query operation is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -1917,9 +1912,9 @@ async def update_by_query_rethrottle(self, task_id, params=None, headers=None): operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1936,11 +1931,6 @@ async def get_script_context(self, params=None, headers=None): """ Returns all script contexts. - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version """ return await self.transport.perform_request( "GET", "/_script_context", params=params, headers=headers @@ -1949,13 +1939,8 @@ async def get_script_context(self, params=None, headers=None): @query_params() async def get_script_languages(self, params=None, headers=None): """ - Returns available script types, languages and contexts - - - .. warning:: + Returns available script types, languages and contexts. - This API is **experimental** so may include breaking changes - or be removed in a future version """ return await self.transport.perform_request( "GET", "/_script_language", params=params, headers=headers @@ -1978,11 +1963,11 @@ async def create_pit(self, index, params=None, headers=None): :arg allow_partial_pit_creation: Allow if point in time can be created with partial failures. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg keep_alive: Specify the keep alive for point in time. :arg preference: Specify the node or shard the operation should - be performed on. + be performed on. Default is random. :arg routing: Comma-separated list of specific routing values. """ if index in SKIP_IN_PATH: @@ -2011,7 +1996,7 @@ async def delete_pit(self, body=None, params=None, headers=None): Deletes one or more point in time searches based on the IDs passed. - :arg body: a point-in-time id to delete + :arg body: The point-in-time ids to be deleted """ return await self.transport.perform_request( "DELETE", @@ -2025,36 +2010,8 @@ async def delete_pit(self, body=None, params=None, headers=None): async def get_all_pits(self, params=None, headers=None): """ Lists all active point in time searches. 
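As a rough illustration of the point-in-time methods touched here (`create_pit`, `delete_pit`, `get_all_pits`), the following sketch walks the full lifecycle. The host, credentials, and `movies` index are assumptions made for the example.

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    # Open a point in time against the index, kept alive for one minute.
    pit = await client.create_pit(index="movies", keep_alive="1m")
    pit_id = pit["pit_id"]

    # Search against that frozen view of the index (no index argument needed).
    hits = await client.search(body={"pit": {"id": pit_id}, "query": {"match_all": {}}})
    print(hits["hits"]["total"])

    # List every PIT the cluster is keeping alive, then release ours explicitly
    # instead of waiting for the keep-alive to lapse.
    print(await client.get_all_pits())
    await client.delete_pit(body={"pit_id": [pit_id]})

    await client.close()


asyncio.run(main())
```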
- """ - return await self.transport.perform_request( - "GET", "/_search/point_in_time/_all", params=params, headers=headers - ) - - @query_params() - async def terms_enum(self, index, body=None, params=None, headers=None): - """ - The terms enum API can be used to discover terms in the index that begin with - the provided string. It is designed for low-latency look-ups used in auto- - complete scenarios. - - - .. warning:: - This API is **beta** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices - :arg body: field name, string which is the prefix expected in - matching terms, timeout and size for max number of results """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - return await self.transport.perform_request( - "POST", - _make_path(index, "_terms_enum"), - params=params, - headers=headers, - body=body, + "GET", "/_search/point_in_time/_all", params=params, headers=headers ) diff --git a/opensearchpy/_async/client/__init__.pyi b/opensearchpy/_async/client/__init__.pyi index a016d791..70a93d19 100644 --- a/opensearchpy/_async/client/__init__.pyi +++ b/opensearchpy/_async/client/__init__.pyi @@ -25,6 +25,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from __future__ import unicode_literals import logging @@ -333,8 +342,8 @@ class AsyncOpenSearch(object): self, id: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -490,8 +499,8 @@ class AsyncOpenSearch(object): self, id: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -642,8 +651,8 @@ class AsyncOpenSearch(object): *, body: Any, context: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -802,7 +811,6 @@ class AsyncOpenSearch(object): ignore_unavailable: Optional[Any] = ..., lenient: Optional[Any] = ..., max_concurrent_shard_requests: Optional[Any] = ..., - min_compatible_shard_node: Optional[Any] = ..., pre_filter_shard_size: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., @@ -991,7 +999,6 @@ class AsyncOpenSearch(object): terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., version: Optional[Any] = ..., - version_type: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -1128,21 +1135,3 @@ class 
AsyncOpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - async def terms_enum( - self, - index: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index a4dd9786..d2864097 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -48,17 +48,17 @@ async def aliases(self, name=None, params=None, headers=None): :arg name: Comma-separated list of alias names. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers @@ -83,22 +83,22 @@ async def allocation(self, node_id=None, params=None, headers=None): :arg node_id: Comma-separated list of node IDs or names to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. 
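For a sense of how the `cat` parameters being reworded here are actually passed, a small sketch — assuming a reachable local cluster; the column names given to `h` are illustrative:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    # Machine-readable alias listing: choose columns with `h`, sort with `s`.
    print(await client.cat.aliases(format="json", h="alias,index", s="alias"))

    # Human-readable per-node disk allocation, with headers and megabyte units.
    print(await client.cat.allocation(v=True, bytes="mb"))

    await client.close()


asyncio.run(main())
```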
""" return await self.transport.perform_request( "GET", @@ -119,10 +119,10 @@ async def count(self, index=None, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers @@ -137,13 +137,13 @@ async def health(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg ts: Set to false to disable timestamping. (default: True) - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg ts: Set to false to disable timestamping. Default is True. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers @@ -155,7 +155,7 @@ async def help(self, params=None, headers=None): Returns help for the Cat APIs. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. """ @@ -187,35 +187,35 @@ async def indices(self, index=None, params=None, headers=None): :arg index: Comma-separated list of indices to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg health: Health status ('green', 'yellow', or 'red') to - filter only indices matching the specified health status. Valid - choices: green, yellow, red - :arg help: Return help information. (default: false) + filter only indices matching the specified health status. Valid choices + are green, yellow, red. + :arg help: Return help information. Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory. (default: false) + memory. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. 
:arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg pri: Set to true to return stats only for primary shards. - (default: false) + Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers @@ -241,15 +241,15 @@ async def master(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ from warnings import warn @@ -280,15 +280,15 @@ async def cluster_manager(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/cluster_manager", params=params, headers=headers @@ -312,27 +312,27 @@ async def nodes(self, params=None, headers=None): Returns basic statistics about performance of cluster nodes. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg full_id: Return the full node ID instead of the shortened - version. (default: false) + version. Default is false. 
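A hedged sketch of `cat.indices` combining the filters documented above (`health`, `pri`, `bytes`, `s`). With `format="json"` the call returns a list of dicts whose keys mirror the cat column names; the cluster connection details are assumptions.

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    # JSON rows instead of the default text table: only yellow indices,
    # primary-shard stats, sizes in kilobytes, sorted by index name.
    rows = await client.cat.indices(
        format="json", health="yellow", pri=True, bytes="kb", s="index"
    )
    for row in rows:
        print(row["index"], row["pri.store.size"])

    await client.close()


asyncio.run(main())
```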
:arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local (Deprecated: This parameter does not cause this API - to act locally): Return local information, do not retrieve the state - from cluster-manager node. (default: false) + to act locally.): Return local information, do not retrieve the state + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers @@ -349,20 +349,20 @@ async def recovery(self, index=None, params=None, headers=None): :arg index: Comma-separated list or wildcard expression of index names to limit the returned information. :arg active_only: If `true`, the response only includes ongoing - shard recoveries. (default: false) - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + shard recoveries. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg detailed: If `true`, the response includes detailed - information about shard recoveries. (default: false) + information about shard recoveries. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "recovery", index), params=params, headers=headers @@ -387,24 +387,24 @@ async def shards(self, index=None, params=None, headers=None): :arg index: Comma-separated list of indices to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. 
(default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "shards", index), params=params, headers=headers @@ -427,20 +427,20 @@ async def segments(self, index=None, params=None, headers=None): :arg index: Comma-separated list of indices to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "segments", index), params=params, headers=headers @@ -467,17 +467,17 @@ async def pending_tasks(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. 
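The shard-level cat endpoints documented in this stretch compose the same way; a minimal sketch, assuming a `movies` index exists on a local test cluster:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    # Shard table for one index pattern, with headers and megabyte units.
    print(await client.cat.shards(index="movies", v=True, bytes="mb"))

    # Only recoveries that are still in flight.
    print(await client.cat.recovery(active_only=True, v=True))

    await client.close()


asyncio.run(main())
```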
""" return await self.transport.perform_request( "GET", "/_cat/pending_tasks", params=params, headers=headers @@ -507,16 +507,16 @@ async def thread_pool(self, thread_pool_patterns=None, params=None, headers=None :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. :arg size: The multiplier in which to display values. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", @@ -534,15 +534,15 @@ async def fielddata(self, fields=None, params=None, headers=None): :arg fields: Comma-separated list of fields to return in the output. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", @@ -571,15 +571,15 @@ async def plugins(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/plugins", params=params, headers=headers @@ -605,15 +605,15 @@ async def nodeattrs(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. 
:arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/nodeattrs", params=params, headers=headers @@ -639,15 +639,15 @@ async def repositories(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/repositories", params=params, headers=headers @@ -675,17 +675,18 @@ async def snapshots(self, repository=None, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). (default: false) + should be ignored when unavailable (missing or closed). Default is + false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", @@ -714,12 +715,12 @@ async def tasks(self, params=None, headers=None): :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information. (default: - false) + :arg detailed: Return detailed task information. Default is + false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. 
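To show how the snapshot-related cat calls above fit together, a sketch — `my_repository` is a hypothetical repository name that must already be registered on the cluster:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    # List the registered snapshot repositories ...
    print(await client.cat.repositories(v=True))

    # ... then the snapshots inside one of them, with times shown in seconds.
    print(
        await client.cat.snapshots(
            repository="my_repository",  # hypothetical; must already exist
            ignore_unavailable=True,
            time="s",
            v=True,
        )
    )

    await client.close()


asyncio.run(main())
```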
:arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all @@ -728,9 +729,9 @@ async def tasks(self, params=None, headers=None): (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers @@ -757,15 +758,15 @@ async def templates(self, name=None, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers @@ -787,7 +788,6 @@ async def pit_segments(self, body=None, params=None, headers=None): List segments for one or several PITs. - :arg body: """ return await self.transport.perform_request( "GET", "/_cat/pit_segments", params=params, headers=headers, body=body @@ -815,23 +815,23 @@ async def segment_replication(self, index=None, params=None, headers=None): :arg index: Comma-separated list or wildcard expression of index names to limit the returned information. :arg active_only: If `true`, the response only includes ongoing - segment replication events. (default: false) - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + segment replication events. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg completed_only: If `true`, the response only includes - latest completed segment replication events. (default: false) + latest completed segment replication events. Default is false. :arg detailed: If `true`, the response includes detailed - information about segment replications. (default: false) + information about segment replications. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. :arg shards: Comma-separated list of shards to display. - :arg time: The unit in which to display time values. 
Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index b64bdc5b..7c9c5f46 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -65,22 +65,22 @@ async def health(self, index=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg level: Specify the level of detail for returned - information. Valid choices: cluster, indices, shards, - awareness_attributes + information. Valid choices are cluster, indices, shards, + awareness_attributes. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. :arg wait_for_active_shards: Wait until the specified number of shards is active. :arg wait_for_events: Wait until all currently queued events - with the given priority are processed. Valid choices: immediate, - urgent, high, normal, low, languid + with the given priority are processed. Valid choices are immediate, + urgent, high, normal, low, languid. :arg wait_for_no_initializing_shards: Whether to wait until there are no initializing shards in the cluster. :arg wait_for_no_relocating_shards: Whether to wait until there @@ -88,7 +88,7 @@ async def health(self, index=None, params=None, headers=None): :arg wait_for_nodes: Wait until the specified number of nodes is available. :arg wait_for_status: Wait until cluster is in a specific state. - Valid choices: green, yellow, red + Valid choices are green, yellow, red. """ return await self.transport.perform_request( "GET", @@ -107,10 +107,10 @@ async def pending_tasks(self, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", "/_cluster/pending_tasks", params=params, headers=headers @@ -133,8 +133,8 @@ async def state(self, metric=None, index=None, params=None, headers=None): :arg metric: Limit the information returned to the specified - metrics. Valid choices: _all, blocks, metadata, nodes, routing_table, - routing_nodes, master_node, cluster_manager_node, version + metrics. 
Valid choices are _all, blocks, metadata, nodes, routing_table, + routing_nodes, master_node, cluster_manager_node, version. :arg index: Comma-separated list of indices; use `_all` or empty string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices @@ -143,17 +143,17 @@ async def state(self, metric=None, index=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none - :arg flat_settings: Return settings in flat format. (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_metadata_version: Wait for the metadata version to be equal or greater than the specified metadata version. :arg wait_for_timeout: The maximum time to wait for @@ -179,8 +179,8 @@ async def stats(self, node_id=None, params=None, headers=None): the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes. - :arg flat_settings: Return settings in flat format. (default: - false) + :arg flat_settings: Return settings in flat format. Default is + false. :arg timeout: Operation timeout. """ return await self.transport.perform_request( @@ -215,8 +215,8 @@ async def reroute(self, body=None, params=None, headers=None): :arg explain: Return an explanation of why the commands can or cannot be executed. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg metric: Limit the information returned to the specified metrics. Defaults to all but metadata. :arg retry_failed: Retries allocation of shards that are blocked @@ -241,13 +241,13 @@ async def get_settings(self, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. - :arg flat_settings: Return settings in flat format. (default: - false) + :arg flat_settings: Return settings in flat format. Default is + false. :arg include_defaults: Whether to return all default clusters - setting. (default: false) + setting. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ return await self.transport.perform_request( @@ -266,11 +266,11 @@ async def put_settings(self, body, params=None, headers=None): or `persistent` (survives cluster restart). 
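As a worked example of the `get_settings`/`put_settings` pair documented here — a sketch only, best run against a disposable cluster since it briefly toggles a real allocation setting:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    # Apply a transient setting (it is dropped again on a full cluster restart).
    await client.cluster.put_settings(
        body={"transient": {"cluster.routing.allocation.enable": "primaries"}}
    )

    # Read it back; flat_settings returns dotted keys rather than nested dicts.
    settings = await client.cluster.get_settings(flat_settings=True)
    print(settings["transient"].get("cluster.routing.allocation.enable"))

    # Setting a key to None (JSON null) removes the override.
    await client.cluster.put_settings(
        body={"transient": {"cluster.routing.allocation.enable": None}}
    )

    await client.close()


asyncio.run(main())
```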
:arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. - :arg flat_settings: Return settings in flat format. (default: - false) + :arg flat_settings: Return settings in flat format. Default is + false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: @@ -299,9 +299,9 @@ async def allocation_explain(self, body=None, params=None, headers=None): :arg body: The index, shard, and primary flag to explain. Empty means 'explain the first unassigned shard' :arg include_disk_info: Return information about disk usage and - shard sizes. (default: false) + shard sizes. Default is false. :arg include_yes_decisions: Return 'YES' decisions in - explanation. (default: false) + explanation. Default is false. """ return await self.transport.perform_request( "POST", @@ -321,8 +321,8 @@ async def delete_component_template(self, name, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: @@ -345,10 +345,10 @@ async def get_component_template(self, name=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", @@ -368,10 +368,10 @@ async def put_component_template(self, name, body, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one. (default: false) + new or can also replace an existing one. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ for param in (name, body): @@ -386,18 +386,20 @@ async def put_component_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("local", "master_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") async def exists_component_template(self, name, params=None, headers=None): """ Returns information about whether a particular component template exist. :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. 
Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -417,7 +419,7 @@ async def delete_voting_config_exclusions(self, params=None, headers=None): :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting - configuration exclusions list. (default: True) + configuration exclusions list. Default is True. """ return await self.transport.perform_request( "DELETE", diff --git a/opensearchpy/_async/client/cluster.pyi b/opensearchpy/_async/client/cluster.pyi index 2685cbb5..b75ec46c 100644 --- a/opensearchpy/_async/client/cluster.pyi +++ b/opensearchpy/_async/client/cluster.pyi @@ -300,6 +300,7 @@ class ClusterClient(NamespacedClient): self, name: Any, *, + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index cf382c52..b284ac27 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -53,8 +53,8 @@ async def delete_dangling_index(self, index_uuid, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: @@ -81,8 +81,8 @@ async def import_dangling_index(self, index_uuid, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index d58a3fb5..cfc48db4 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -38,7 +48,7 @@ async def analyze(self, body=None, index=None, params=None, headers=None): :arg body: Define analyzer/tokenizer parameters and the text on which the analysis should be performed - :arg index: The name of the index to scope the operation + :arg index: The name of the index to scope the operation. """ return await self.transport.perform_request( "POST", @@ -54,16 +64,16 @@ async def refresh(self, index=None, params=None, headers=None): Performs the refresh operation in one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). """ return await self.transport.perform_request( "POST", _make_path(index, "_refresh"), params=params, headers=headers @@ -81,44 +91,47 @@ async def flush(self, index=None, params=None, headers=None): Performs the flush operation on one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string for all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg force: Whether a flush should be forced even if it is not necessarily needed ie. if no changes will be committed to the index. This is useful if transaction log IDs should be incremented even if no uncommitted changes are present. (This setting can be considered as - internal) + internal). :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg wait_if_ongoing: If set to true the flush operation will block until the flush can be executed if another flush operation is - already executing. The default is true. 
If set to false the flush will
-            be skipped iff if another flush operation is already running.
+            already executing. If set to false the flush will be skipped if
+            another flush operation is already running. Default is True.
         """
         return await self.transport.perform_request(
             "POST", _make_path(index, "_flush"), params=params, headers=headers
         )

     @query_params(
-        "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards"
+        "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards"
     )
     async def create(self, index, body=None, params=None, headers=None):
         """
         Creates an index with optional settings and mappings.


-        :arg index: The name of the index
+        :arg index: Index name.
         :arg body: The configuration for the index (`settings` and
             `mappings`)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         :arg wait_for_active_shards: Set the number of active shards to
             wait for before the operation returns.
         """
@@ -130,20 +143,23 @@ async def create(self, index, body=None, params=None, headers=None):
     @query_params(
-        "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards"
+        "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards"
     )
     async def clone(self, index, target, body=None, params=None, headers=None):
         """
-        Clones an index
+        Clones an index.


-        :arg index: The name of the source index to clone
-        :arg target: The name of the target index to clone into
+        :arg index: The name of the source index to clone.
+        :arg target: The name of the target index.
         :arg body: The configuration for the target index (`settings` and
             `aliases`)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         :arg wait_for_active_shards: Set the number of active shards to
             wait for on the cloned index before the operation returns.
         """
@@ -161,35 +177,40 @@
     @query_params(
         "allow_no_indices",
+        "cluster_manager_timeout",
         "expand_wildcards",
         "flat_settings",
         "ignore_unavailable",
         "include_defaults",
         "local",
         "master_timeout",
-        "cluster_manager_timeout",
     )
     async def get(self, index, params=None, headers=None):
         """
         Returns information about one or more indices.
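A minimal sketch tying together `indices.create` and `indices.clone` as documented above. One behavior the docstrings do not spell out: OpenSearch requires the source index to be write-blocked before it can be cloned, hence the `put_settings` step. Index names and connection details are illustrative assumptions.

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main():
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

    # Create a small source index and wait for its primary to be active.
    await client.indices.create(
        index="movies",
        body={"settings": {"index": {"number_of_shards": 1, "number_of_replicas": 0}}},
        wait_for_active_shards=1,
    )

    # Block writes on the source, then clone it into a new target index.
    await client.indices.put_settings(index="movies", body={"index.blocks.write": True})
    await client.indices.clone(index="movies", target="movies-clone")

    await client.close()


asyncio.run(main())
```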
- :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -200,10 +221,10 @@ async def get(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -212,18 +233,21 @@ async def open(self, index, params=None, headers=None): Opens an index. - :arg index: A comma separated list of indices to open + :arg index: Comma-separated list of indices to open. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ @@ -236,10 +260,10 @@ async def open(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -248,22 +272,23 @@ async def close(self, index, params=None, headers=None): Closes an index. - :arg index: A comma separated list of indices to close + :arg index: Comma-separated list of indices to close. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. Set to `index-setting` to wait - according to the index setting `index.write.wait_for_active_shards`, or - `all` to wait for all shards, or an integer. Defaults to `0`. + wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -274,10 +299,10 @@ async def close(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) async def delete(self, index, params=None, headers=None): @@ -285,18 +310,23 @@ async def delete(self, index, params=None, headers=None): Deletes an index. 
- :arg index: A comma-separated list of indices to delete; use - `_all` or `*` string to delete all indices - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg index: Comma-separated list of indices to delete; use + `_all` or `*` string to delete all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -318,20 +348,22 @@ async def exists(self, index, params=None, headers=None): Returns information about whether a particular index exists. - :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. 
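+
+        Example (a minimal sketch; assumes an `AsyncOpenSearch` client bound
+        to the name `client` and an index named `movies`)::
+
+            # HEAD /movies -- resolves to True when the index exists.
+            if await client.indices.exists(index="movies"):
+                print("movies index is present")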
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -342,10 +374,10 @@ async def exists(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "write_index_only", ) @@ -355,26 +387,31 @@ async def put_mapping(self, body, index=None, params=None, headers=None): :arg body: The mapping definition - :arg index: A comma-separated list of index names the mapping - should be added to (supports wildcards); use `_all` or omit to add the - mapping on all indices. + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg write_index_only: When true, applies mappings only to the - write index of an alias or data stream + write index of an alias or data stream. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") + if index in SKIP_IN_PATH: + index = "_all" + return await self.transport.perform_request( "PUT", _make_path(index, "_mapping"), @@ -385,36 +422,37 @@ async def put_mapping(self, body, index=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "local", "master_timeout", - "cluster_manager_timeout", ) async def get_mapping(self, index=None, params=None, headers=None): """ Returns mappings for one or more indices. - :arg index: A comma-separated list of index names + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
:arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg local (Deprecated: This parameter is a no-op and field + mappings are always retrieved locally.): Return local information, do + not retrieve the state from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( - "GET", - _make_path(index, "_mapping"), - params=params, - headers=headers, + "GET", _make_path(index, "_mapping"), params=params, headers=headers ) @query_params( @@ -429,20 +467,20 @@ async def get_field_mapping(self, fields, index=None, params=None, headers=None) Returns mapping for one or more fields. - :arg fields: A comma-separated list of fields - :arg index: A comma-separated list of index names + :arg fields: Comma-separated list of fields. + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether the default mapping values should - be returned as well + be returned as well. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if fields in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'fields'.") @@ -454,21 +492,23 @@ async def get_field_mapping(self, fields, index=None, params=None, headers=None) headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def put_alias(self, index, name, body=None, params=None, headers=None): """ Creates or updates an alias. - :arg index: A comma-separated list of index names the alias - should point to (supports wildcards); use `_all` to perform the - operation on all indices. - :arg name: The name of the alias to be created or updated + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: The name of the alias to be created or updated. 
:arg body: The settings for the alias, such as `routing` or `filter` - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -488,19 +528,18 @@ async def exists_alias(self, name, index=None, params=None, headers=None): Returns information about whether a particular alias exists. - :arg name: A comma-separated list of alias names to return - :arg index: A comma-separated list of index names to filter - aliases + :arg name: Comma-separated list of alias names. + :arg index: Comma-separated list of indices to filter aliases. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -515,34 +554,36 @@ async def get_alias(self, index=None, name=None, params=None, headers=None): Returns an alias. - :arg index: A comma-separated list of index names to filter - aliases - :arg name: A comma-separated list of alias names to return + :arg index: Comma-separated list of indices to filter aliases. + :arg name: Comma-separated list of alias names. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. 
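+
+        Example (a minimal sketch; `client` is an assumed `AsyncOpenSearch`
+        instance and `movies-alias` a hypothetical alias name)::
+
+            # GET /movies/_alias/movies-alias -- maps index names to their
+            # alias definitions.
+            aliases = await client.indices.get_alias(
+                index="movies", name="movies-alias"
+            )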
""" return await self.transport.perform_request( "GET", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def update_aliases(self, body, params=None, headers=None): """ Updates index aliases. :arg body: The definition of `actions` to perform - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Request timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -551,19 +592,22 @@ async def update_aliases(self, body, params=None, headers=None): "POST", "/_aliases", params=params, headers=headers, body=body ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def delete_alias(self, index, name, params=None, headers=None): """ Deletes an alias. - :arg index: A comma-separated list of index names (supports - wildcards); use `_all` for all indices - :arg name: A comma-separated list of aliases to delete (supports + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of aliases to delete (supports wildcards); use `_all` to delete all aliases for the specified indices. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -573,21 +617,24 @@ async def delete_alias(self, index, name, params=None, headers=None): "DELETE", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("create", "master_timeout", "cluster_manager_timeout", "order") + @query_params("cluster_manager_timeout", "create", "master_timeout", "order") async def put_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg order: The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower - numbers) + numbers). """ for param in (name, body): if param in SKIP_IN_PATH: @@ -601,21 +648,22 @@ async def put_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def exists_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -624,36 +672,40 @@ async def exists_template(self, name, params=None, headers=None): "HEAD", _make_path("_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def get_template(self, name=None, params=None, headers=None): """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_template", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def delete_template(self, name, params=None, headers=None): """ Deletes an index template. 
- :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -664,38 +716,41 @@ async def delete_template(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", - "cluster_manager_timeout", ) async def get_settings(self, index=None, name=None, params=None, headers=None): """ Returns settings for one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg name: The name of the settings that should be included + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of settings. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
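+
+        Example (a minimal sketch; assumes `client` is an `AsyncOpenSearch`
+        instance and the `movies` index exists)::
+
+            # Fetch a single setting by name; omit `name` to fetch them all.
+            settings = await client.indices.get_settings(
+                index="movies", name="index.number_of_shards"
+            )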
""" return await self.transport.perform_request( "GET", _make_path(index, "_settings", name), params=params, headers=headers @@ -703,11 +758,11 @@ async def get_settings(self, index=None, name=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "preserve_existing", "timeout", ) @@ -717,24 +772,27 @@ async def put_settings(self, body, index=None, params=None, headers=None): :arg body: The index settings to be updated - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg preserve_existing: Whether to update existing settings. If - set to `true` existing settings on an index remain unchanged, the - default is `false` - :arg timeout: Explicit operation timeout + set to `true` existing settings on an index remain unchanged. Default is + false. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -757,43 +815,40 @@ async def put_settings(self, body, index=None, params=None, headers=None): "include_segment_file_sizes", "include_unloaded_segments", "level", - "types", ) async def stats(self, index=None, metric=None, params=None, headers=None): """ Provides statistics on operations happening in an index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg metric: Limit the information returned the specific - metrics. Valid choices: _all, completion, docs, fielddata, query_cache, - flush, get, indexing, merge, request_cache, refresh, search, segments, - store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) + metrics. 
Valid choices are _all, store, indexing, get, search, merge, + flush, refresh, query_cache, fielddata, docs, warmer, completion, + segments, translog, suggest, request_cache, recovery. + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). :arg forbid_closed_indices: If set to false stats will also collected from closed indices if explicitly specified or if - expand_wildcards expands to closed indices Default: True - :arg groups: A comma-separated list of search groups for - `search` index metric + expand_wildcards expands to closed indices. Default is True. + :arg groups: Comma-separated list of search groups for `search` + index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) + applies if segment stats are requested). Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. Default is false. :arg level: Return stats aggregated at cluster, index or shard - level Valid choices: cluster, indices, shards Default: indices - :arg types: A comma-separated list of document types for the - `indexing` index metric + level. Valid choices are cluster, indices, shards. """ return await self.transport.perform_request( "GET", _make_path(index, "_stats", metric), params=params, headers=headers @@ -807,17 +862,18 @@ async def segments(self, index=None, params=None, headers=None): Provides low-level information about segments in a Lucene index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg verbose: Includes detailed memory usage by Lucene. + should be ignored when unavailable (missing or closed). + :arg verbose: Includes detailed memory usage by Lucene. Default + is false. 
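+
+        Example (a minimal sketch; `client` is an assumed `AsyncOpenSearch`
+        instance)::
+
+            # Per-shard Lucene segment details for the `movies` index.
+            segments = await client.indices.segments(index="movies", verbose=True)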
""" return await self.transport.perform_request( "GET", _make_path(index, "_segments"), params=params, headers=headers @@ -843,30 +899,29 @@ async def validate_query(self, body=None, index=None, params=None, headers=None) :arg body: The query definition specified with the Query DSL - :arg index: A comma-separated list of index names to restrict - the operation; use `_all` or empty string to perform the operation on - all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg all_shards: Execute validation on all shards instead of one - random shard per index + random shard per index. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg explain: Return detailed information about the error + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg explain: Return detailed information about the error. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored - :arg q: Query in the Lucene query string syntax + as providing text to a numeric field) should be ignored. + :arg q: Query in the Lucene query string syntax. :arg rewrite: Provide a more detailed explanation showing the actual Lucene query that will be executed. """ @@ -892,21 +947,21 @@ async def clear_cache(self, index=None, params=None, headers=None): Clears all or specific caches for one or more indices. - :arg index: A comma-separated list of index name to limit the - operation + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata: Clear field data - :arg fields: A comma-separated list of fields to clear when - using the `fielddata` parameter (default: all) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fielddata: Clear field data. 
+ :arg fields: Comma-separated list of fields to clear when using + the `fielddata` parameter (default: all). :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg query: Clear query caches - :arg request: Clear request cache + should be ignored when unavailable (missing or closed). + :arg query: Clear query caches. + :arg request: Clear request cache. """ return await self.transport.perform_request( "POST", _make_path(index, "_cache", "clear"), params=params, headers=headers @@ -918,12 +973,12 @@ async def recovery(self, index=None, params=None, headers=None): Returns information about ongoing index shard recoveries. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg active_only: Display only those recoveries that are - currently on-going + currently on-going. Default is false. :arg detailed: Whether to display detailed information about - shard recovery + shard recovery. Default is false. """ return await self.transport.perform_request( "GET", _make_path(index, "_recovery"), params=params, headers=headers @@ -938,23 +993,23 @@ async def recovery(self, index=None, params=None, headers=None): ) async def upgrade(self, index=None, params=None, headers=None): """ - DEPRECATED Upgrades to the current version of Lucene. + The _upgrade API is no longer useful and will be removed. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg only_ancient_segments: If true, only ancient (an older - Lucene major release) segments will be upgraded - :arg wait_for_completion: Specify whether the request should - block until the all segments are upgraded (default: false) + Lucene major release) segments will be upgraded. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ return await self.transport.perform_request( "POST", _make_path(index, "_upgrade"), params=params, headers=headers @@ -963,19 +1018,19 @@ async def upgrade(self, index=None, params=None, headers=None): @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") async def get_upgrade(self, index=None, params=None, headers=None): """ - DEPRECATED Returns a progress status of current upgrade. + The _upgrade API is no longer useful and will be removed. 
- :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). """ return await self.transport.perform_request( "GET", _make_path(index, "_upgrade"), params=params, headers=headers @@ -989,19 +1044,18 @@ async def shard_stores(self, index=None, params=None, headers=None): Provides store information for shard copies of indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg status: A comma-separated list of statuses used to filter - on shards to get store information for Valid choices: green, yellow, - red, all + should be ignored when unavailable (missing or closed). + :arg status: Comma-separated list of statuses used to filter on + shards to get store information for. """ return await self.transport.perform_request( "GET", _make_path(index, "_shard_stores"), params=params, headers=headers @@ -1020,31 +1074,31 @@ async def forcemerge(self, index=None, params=None, headers=None): Performs the force merge operation on one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg flush: Specify whether the index should be flushed after - performing the operation (default: true) + performing the operation. Default is True. 
:arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg max_num_segments: The number of segments the index should - be merged into (default: dynamic) + be merged into (default: dynamic). :arg only_expunge_deletes: Specify whether the operation should - only expunge deleted documents + only expunge deleted documents. """ return await self.transport.perform_request( "POST", _make_path(index, "_forcemerge"), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -1053,15 +1107,18 @@ async def shrink(self, index, target, body=None, params=None, headers=None): Allow to shrink an existing index into a new index with fewer primary shards. - :arg index: The name of the source index to shrink - :arg target: The name of the target index to shrink into + :arg index: The name of the source index to shrink. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ @@ -1078,9 +1135,9 @@ async def shrink(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -1090,15 +1147,18 @@ async def split(self, index, target, body=None, params=None, headers=None): shards. - :arg index: The name of the source index to split - :arg target: The name of the target index to split into + :arg index: The name of the source index to split. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. 
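+
+        Example (a minimal sketch; assumes `client` is an `AsyncOpenSearch`
+        instance and that `movies` was created with one primary shard and has
+        been made read-only via the `index.blocks.write: true` setting)::
+
+            # Split one primary shard into two in a new `movies-split` index.
+            await client.indices.split(
+                index="movies",
+                target="movies-split",
+                body={"settings": {"index.number_of_shards": 2}},
+            )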
""" @@ -1115,9 +1175,9 @@ async def split(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "dry_run", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -1129,16 +1189,19 @@ async def rollover( to be too large or too old. - :arg alias: The name of the alias to rollover + :arg alias: The name of the alias to rollover. :arg body: The conditions that needs to be met for executing rollover - :arg new_index: The name of the rollover index + :arg new_index: The name of the rollover index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg dry_run: If set to true the rollover action will only be - validated but not actually performed even if a condition matches. The - default is false - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + validated but not actually performed even if a condition matches. + Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the newly created rollover index before the operation returns. @@ -1154,133 +1217,34 @@ async def rollover( body=body, ) - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - async def freeze(self, index, params=None, headers=None): - """ - Freezes an index. A frozen index has almost no overhead on the cluster (except - for maintaining its metadata in memory) and is read-only. - - - :arg index: The name of the index to freeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", _make_path(index, "_freeze"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - async def unfreeze(self, index, params=None, headers=None): - """ - Unfreezes an index. When a frozen index is unfrozen, the index goes through the - normal recovery process and becomes writeable again. 
- - - :arg index: The name of the index to unfreeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", _make_path(index, "_unfreeze"), params=params, headers=headers - ) - - @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def reload_search_analyzers(self, index, params=None, headers=None): - """ - Reloads an index's search analyzers and their resources. - - - :arg index: A comma-separated list of index names to reload - analyzers for - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "GET", - _make_path(index, "_reload_search_analyzers"), - params=params, - headers=headers, - ) - @query_params() - async def create_data_stream(self, name, params=None, headers=None): + async def create_data_stream(self, name, body=None, params=None, headers=None): """ - Creates a data stream + Creates or updates a data stream. - :arg name: The name of the data stream + :arg name: The name of the data stream. + :arg body: The data stream definition """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( - "PUT", _make_path("_data_stream", name), params=params, headers=headers + "PUT", + _make_path("_data_stream", name), + params=params, + headers=headers, + body=body, ) - @query_params("expand_wildcards") + @query_params() async def delete_data_stream(self, name, params=None, headers=None): """ Deletes a data stream. - :arg name: A comma-separated list of data streams to delete; use - `*` to delete all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. 
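+
+        Example (a minimal sketch; `client` is an assumed `AsyncOpenSearch`
+        instance and `logs-app` a hypothetical data stream)::
+
+            # DELETE /_data_stream/logs-app
+            await client.indices.delete_data_stream(name="logs-app")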
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1289,16 +1253,19 @@ async def delete_data_stream(self, name, params=None, headers=None): "DELETE", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def delete_index_template(self, name, params=None, headers=None): """ Deletes an index template. - :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1310,21 +1277,22 @@ async def delete_index_template(self, name, params=None, headers=None): headers=headers, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def exists_index_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. - :arg name: The name of the template - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1333,40 +1301,44 @@ async def exists_index_template(self, name, params=None, headers=None): "HEAD", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") async def get_index_template(self, name=None, params=None, headers=None): """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. 
:arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") async def put_index_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition :arg cause: User defined reason for creating/updating the index - template + template. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (name, body): if param in SKIP_IN_PATH: @@ -1380,24 +1352,27 @@ async def put_index_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") async def simulate_index_template(self, name, body=None, params=None, headers=None): """ Simulate matching the given index name against the index templates in the - system + system. :arg name: The name of the index (it must be a concrete index - name) + name). :arg body: New index template definition, which will be included in the simulation, as if it already exists in the system :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
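A short sketch of `put_index_template` with the parameters documented above (name and settings are illustrative):

```python
await client.indices.put_index_template(
    name="logs-template",
    body={
        "index_patterns": ["logs-*"],
        "template": {"settings": {"index": {"number_of_shards": 1}}},
    },
    create=True,  # only add if new; never replace an existing template
)
```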
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1410,38 +1385,38 @@ async def simulate_index_template(self, name, body=None, params=None, headers=No body=body, ) - @query_params("expand_wildcards") + @query_params() async def get_data_stream(self, name=None, params=None, headers=None): """ Returns data streams. - :arg name: A comma-separated list of data streams to get; use - `*` to get all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. """ return await self.transport.perform_request( "GET", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") async def simulate_template(self, body=None, name=None, params=None, headers=None): """ - Simulate resolving the given template name or body + Simulate resolving the given template name or body. :arg body: New index template definition to be simulated, if no index template name is specified - :arg name: The name of the index template + :arg name: The name of the template. :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "POST", @@ -1454,19 +1429,14 @@ async def simulate_template(self, body=None, name=None, params=None, headers=Non @query_params("expand_wildcards") async def resolve_index(self, name, params=None, headers=None): """ - Returns information about any matching indices, aliases, and data streams + Returns information about any matching indices, aliases, and data streams. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg name: A comma-separated list of names or wildcard - expressions - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of names or wildcard + expressions. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1477,10 +1447,10 @@ async def resolve_index(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) async def add_block(self, index, block, params=None, headers=None): @@ -1488,20 +1458,23 @@ async def add_block(self, index, block, params=None, headers=None): Adds a block to an index. - :arg index: A comma separated list of indices to add a block to + :arg index: Comma-separated list of indices to add a block to. :arg block: The block to add (one of read, write, read_only or - metadata) + metadata). :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, block): if param in SKIP_IN_PATH: @@ -1517,8 +1490,8 @@ async def data_streams_stats(self, name=None, params=None, headers=None): Provides statistics on operations happening in a data stream. - :arg name: A comma-separated list of data stream names; use - `_all` or empty string to perform the operation on all data streams + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. 
""" return await self.transport.perform_request( "GET", @@ -1526,115 +1499,3 @@ async def data_streams_stats(self, name=None, params=None, headers=None): params=params, headers=headers, ) - - @query_params() - async def promote_data_stream(self, name, params=None, headers=None): - """ - Promotes a data stream from a replicated data stream managed by CCR to a - regular data stream - - - :arg name: The name of the data stream - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return await self.transport.perform_request( - "POST", - _make_path("_data_stream", "_promote", name), - params=params, - headers=headers, - ) - - @query_params() - async def migrate_to_data_stream(self, name, params=None, headers=None): - """ - Migrates an alias to a data stream - - - :arg name: The name of the alias to migrate - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return await self.transport.perform_request( - "POST", - _make_path("_data_stream", "_migrate", name), - params=params, - headers=headers, - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "flush", - "ignore_unavailable", - "run_expensive_tasks", - ) - async def disk_usage(self, index, params=None, headers=None): - """ - Analyzes the disk usage of each field of an index or data stream - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: Comma-separated list of indices or data streams to - analyze the disk usage - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flush: Whether flush or not before analyzing the index disk - usage. Defaults to true - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg run_expensive_tasks: Must be set to [true] in order for the - task to be performed. Defaults to false. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", _make_path(index, "_disk_usage"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", "expand_wildcards", "fields", "ignore_unavailable" - ) - async def field_usage_stats(self, index, params=None, headers=None): - """ - Returns the field usage stats for each field of an index - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. 
Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of fields to include in the - stats if only a subset of fields should be returned (supports wildcards) - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "GET", - _make_path(index, "_field_usage_stats"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/_async/client/indices.pyi b/opensearchpy/_async/client/indices.pyi index 53f6d87f..0d9b5953 100644 --- a/opensearchpy/_async/client/indices.pyi +++ b/opensearchpy/_async/client/indices.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -94,8 +103,8 @@ class IndicesClient(NamespacedClient): index: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -117,8 +126,8 @@ class IndicesClient(NamespacedClient): target: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -139,13 +148,13 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., include_defaults: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -164,10 +173,10 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -188,10 +197,10 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: 
Optional[bool] = ..., @@ -212,10 +221,10 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -259,10 +268,10 @@ class IndicesClient(NamespacedClient): body: Any, index: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., write_index_only: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -283,11 +292,11 @@ class IndicesClient(NamespacedClient): *, index: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -330,8 +339,8 @@ class IndicesClient(NamespacedClient): name: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -394,8 +403,8 @@ class IndicesClient(NamespacedClient): self, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -415,8 +424,8 @@ class IndicesClient(NamespacedClient): index: Any, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -436,9 +445,9 @@ class IndicesClient(NamespacedClient): name: Any, *, body: Any, + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., order: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -457,10 +466,10 @@ class IndicesClient(NamespacedClient): self, name: Any, *, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -478,10 +487,10 @@ class IndicesClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -499,8 +508,8 @@ class IndicesClient(NamespacedClient): self, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., 
timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -521,13 +530,13 @@ class IndicesClient(NamespacedClient): index: Optional[Any] = ..., name: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., include_defaults: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -547,11 +556,11 @@ class IndicesClient(NamespacedClient): body: Any, index: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., preserve_existing: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -581,7 +590,6 @@ class IndicesClient(NamespacedClient): include_segment_file_sizes: Optional[Any] = ..., include_unloaded_segments: Optional[Any] = ..., level: Optional[Any] = ..., - types: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -731,26 +739,6 @@ class IndicesClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - async def flush_synced( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
async def shard_stores( self, *, @@ -801,9 +789,9 @@ class IndicesClient(NamespacedClient): target: Any, *, body: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., copy_settings: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -825,9 +813,9 @@ class IndicesClient(NamespacedClient): target: Any, *, body: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., copy_settings: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -849,57 +837,9 @@ class IndicesClient(NamespacedClient): *, body: Optional[Any] = ..., new_index: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def freeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def unfreeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., + dry_run: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -915,30 +855,11 @@ class IndicesClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
- async def reload_search_analyzers( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... async def create_data_stream( self, name: Any, *, + body: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -956,7 +877,6 @@ class IndicesClient(NamespacedClient): self, name: Any, *, - expand_wildcards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -974,8 +894,8 @@ class IndicesClient(NamespacedClient): self, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -994,10 +914,10 @@ class IndicesClient(NamespacedClient): self, name: Any, *, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1015,10 +935,10 @@ class IndicesClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1038,9 +958,9 @@ class IndicesClient(NamespacedClient): *, body: Any, cause: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1060,9 +980,9 @@ class IndicesClient(NamespacedClient): *, body: Optional[Any] = ..., cause: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1080,7 +1000,6 @@ class IndicesClient(NamespacedClient): self, *, name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1100,9 +1019,9 @@ class IndicesClient(NamespacedClient): body: Optional[Any] = ..., name: Optional[Any] = ..., cause: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] 
= ..., @@ -1140,10 +1059,10 @@ class IndicesClient(NamespacedClient): block: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -1175,80 +1094,3 @@ class IndicesClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - async def promote_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def migrate_to_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def disk_usage( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - run_expensive_tasks: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def field_usage_stats( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
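The regenerated stubs above make everything after the leading positional arguments keyword-only, so option passing looks like the following sketch (values are illustrative):

```python
await client.indices.get_index_template(
    name="logs-template",
    flat_settings=True,             # query param forwarded by @query_params
    cluster_manager_timeout="30s",  # preferred over deprecated master_timeout
)
```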
diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index cb5253eb..eab27980 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -50,8 +50,8 @@ async def get_pipeline(self, id=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers @@ -68,8 +68,8 @@ async def put_pipeline(self, id, body, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ for param in (id, body): @@ -94,8 +94,8 @@ async def delete_pipeline(self, id, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: @@ -117,7 +117,7 @@ async def simulate(self, body, id=None, params=None, headers=None): :arg body: The simulate definition :arg id: Pipeline ID. :arg verbose: Verbose mode. Display data output for each - processor in executed pipeline. + processor in executed pipeline. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index e0e8b06b..31cd4915 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -73,10 +73,10 @@ async def info(self, node_id=None, metric=None, params=None, headers=None): node you're connecting to, leave empty to get information from all nodes. :arg metric: Comma-separated list of metrics you wish returned. - Leave empty to return all. Valid choices: settings, os, process, jvm, - thread_pool, transport, http, plugins, ingest - :arg flat_settings: Return settings in flat format. (default: - false) + Leave empty to return all. Valid choices are settings, os, process, jvm, + thread_pool, transport, http, plugins, ingest. + :arg flat_settings: Return settings in flat format. Default is + false. :arg timeout: Operation timeout. """ return await self.transport.perform_request( @@ -105,13 +105,13 @@ async def stats( node you're connecting to, leave empty to get information from all nodes. :arg metric: Limit the information returned to the specified - metrics. Valid choices: _all, breaker, fs, http, indices, jvm, os, - process, thread_pool, transport, discovery, indexing_pressure + metrics. Valid choices are _all, breaker, fs, http, indices, jvm, os, + process, thread_pool, transport, discovery, indexing_pressure. 
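For the nodes API docstrings above, a brief usage sketch (the metric list is illustrative):

```python
# Restrict the stats payload to JVM and OS metrics across all nodes.
response = await client.nodes.stats(metric="jvm,os")
```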
:arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) - metric isn't specified. Valid choices: _all, store, indexing, get, + metric isn't specified. Valid choices are _all, store, indexing, get, search, merge, flush, refresh, query_cache, fielddata, docs, warmer, - completion, segments, translog, suggest, request_cache, recovery + completion, segments, translog, suggest, request_cache, recovery. :arg completion_fields: Comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards). :arg fielddata_fields: Comma-separated list of fields for @@ -122,9 +122,9 @@ async def stats( index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested). (default: false) + applies if segment stats are requested). Default is false. :arg level: Return indices stats aggregated at index, node or - shard level. Valid choices: indices, node, shards + shard level. Valid choices are indices, node, shards. :arg timeout: Operation timeout. :arg types: Comma-separated list of document types for the `indexing` index metric. @@ -148,16 +148,16 @@ async def hot_threads(self, node_id=None, params=None, headers=None): the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes. - :arg doc_type: The type to sample. Valid choices: cpu, wait, - block + :arg doc_type: The type to sample. Valid choices are cpu, wait, + block. :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty - task queue. (default: True) + task queue. Default is True. :arg interval: The interval for the second sampling of threads. - :arg snapshots: Number of samples of thread stacktrace. - (default: 10) + :arg snapshots: Number of samples of thread stacktrace. Default + is 10. :arg threads: Specify the number of threads to provide - information for. (default: 3) + information for. Default is 3. :arg timeout: Operation timeout. """ # type is a reserved word so it cannot be used, use doc_type instead @@ -182,7 +182,7 @@ async def usage(self, node_id=None, metric=None, params=None, headers=None): node you're connecting to, leave empty to get information from all nodes. :arg metric: Limit the information returned to the specified - metrics. Valid choices: _all, rest_actions + metrics. Valid choices are _all, rest_actions. :arg timeout: Operation timeout. """ return await self.transport.perform_request( diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index bc8e8671..49e658d9 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -7,6 +7,17 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -17,25 +28,25 @@ class SecurityClient(NamespacedClient): async def get_account_details(self, params=None, headers=None): """ Returns account details for the current user. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "account"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/account", params=params, headers=headers ) @query_params() async def change_password(self, body, params=None, headers=None): """ Changes the password for the current user. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "account"), + "/_plugins/_security/api/account", params=params, headers=headers, body=body, @@ -45,10 +56,13 @@ async def change_password(self, body, params=None, headers=None): async def get_action_group(self, action_group, params=None, headers=None): """ Retrieves one action group. + + + :arg action_group: Action group to retrieve. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." ) return await self.transport.perform_request( @@ -62,10 +76,11 @@ async def get_action_group(self, action_group, params=None, headers=None): async def get_action_groups(self, params=None, headers=None): """ Retrieves all action groups. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups/", params=params, headers=headers, ) @@ -73,11 +88,14 @@ async def get_action_groups(self, params=None, headers=None): @query_params() async def delete_action_group(self, action_group, params=None, headers=None): """ - Deletes the specified action group. + Delete a specified action group. + + + :arg action_group: Action group to delete. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." ) return await self.transport.perform_request( @@ -91,6 +109,10 @@ async def delete_action_group(self, action_group, params=None, headers=None): async def create_action_group(self, action_group, body, params=None, headers=None): """ Creates or replaces the specified action group. + + + :arg action_group: The name of the action group to create or + replace """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -108,6 +130,8 @@ async def create_action_group(self, action_group, body, params=None, headers=Non async def patch_action_group(self, action_group, body, params=None, headers=None): """ Updates individual attributes of an action group. 
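A hedged sketch of the action-group calls above (the group name and permissions are illustrative; the PATCH body follows the security plugin's JSON Patch format):

```python
await client.security.create_action_group(
    action_group="custom-search",
    body={"allowed_actions": ["indices:data/read/search*"]},
)
await client.security.patch_action_group(
    action_group="custom-search",
    body=[
        {"op": "replace", "path": "/allowed_actions",
         "value": ["indices:data/read/*"]}
    ],
)
```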
+ + """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -125,13 +149,15 @@ async def patch_action_group(self, action_group, body, params=None, headers=None async def patch_action_groups(self, body, params=None, headers=None): """ Creates, updates, or deletes multiple action groups in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups", params=params, headers=headers, body=body, @@ -140,7 +166,9 @@ async def patch_action_groups(self, body, params=None, headers=None): @query_params() async def get_user(self, username, params=None, headers=None): """ - Retrieves one user. + Retrieve one internal user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -155,11 +183,12 @@ async def get_user(self, username, params=None, headers=None): @query_params() async def get_users(self, params=None, headers=None): """ - Retrieves all users. + Retrieve all internal users. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, ) @@ -167,7 +196,9 @@ async def get_users(self, params=None, headers=None): @query_params() async def delete_user(self, username, params=None, headers=None): """ - Deletes the specified user. + Delete the specified user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -183,6 +214,8 @@ async def delete_user(self, username, params=None, headers=None): async def create_user(self, username, body, params=None, headers=None): """ Creates or replaces the specified user. + + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -200,6 +233,8 @@ async def create_user(self, username, body, params=None, headers=None): async def patch_user(self, username, body, params=None, headers=None): """ Updates individual attributes of an internal user. + + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -217,13 +252,15 @@ async def patch_user(self, username, body, params=None, headers=None): async def patch_users(self, body, params=None, headers=None): """ Creates, updates, or deletes multiple internal users in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, body=body, @@ -233,6 +270,8 @@ async def patch_users(self, body, params=None, headers=None): async def get_role(self, role, params=None, headers=None): """ Retrieves one role. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -248,18 +287,18 @@ async def get_role(self, role, params=None, headers=None): async def get_roles(self, params=None, headers=None): """ Retrieves all roles. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "roles"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/roles/", params=params, headers=headers ) @query_params() async def delete_role(self, role, params=None, headers=None): """ - Deletes the specified role. 
+ Delete the specified role. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -275,6 +314,8 @@ async def delete_role(self, role, params=None, headers=None): async def create_role(self, role, body, params=None, headers=None): """ Creates or replaces the specified role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -292,6 +333,8 @@ async def create_role(self, role, body, params=None, headers=None): async def patch_role(self, role, body, params=None, headers=None): """ Updates individual attributes of a role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -309,13 +352,15 @@ async def patch_role(self, role, body, params=None, headers=None): async def patch_roles(self, body, params=None, headers=None): """ Creates, updates, or deletes multiple roles in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "roles"), + "/_plugins/_security/api/roles", params=params, headers=headers, body=body, @@ -325,6 +370,8 @@ async def patch_roles(self, body, params=None, headers=None): async def get_role_mapping(self, role, params=None, headers=None): """ Retrieves one role mapping. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -340,10 +387,11 @@ async def get_role_mapping(self, role, params=None, headers=None): async def get_role_mappings(self, params=None, headers=None): """ Retrieves all role mappings. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, ) @@ -352,6 +400,8 @@ async def get_role_mappings(self, params=None, headers=None): async def delete_role_mapping(self, role, params=None, headers=None): """ Deletes the specified role mapping. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -367,6 +417,8 @@ async def delete_role_mapping(self, role, params=None, headers=None): async def create_role_mapping(self, role, body, params=None, headers=None): """ Creates or replaces the specified role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -384,6 +436,8 @@ async def create_role_mapping(self, role, body, params=None, headers=None): async def patch_role_mapping(self, role, body, params=None, headers=None): """ Updates individual attributes of a role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -401,13 +455,15 @@ async def patch_role_mapping(self, role, body, params=None, headers=None): async def patch_role_mappings(self, body, params=None, headers=None): """ Creates or updates multiple role mappings in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, body=body, @@ -417,6 +473,8 @@ async def patch_role_mappings(self, body, params=None, headers=None): async def get_tenant(self, tenant, params=None, headers=None): """ Retrieves one tenant. 
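As a usage sketch for the role and role-mapping endpoints above (the role name, index patterns, and backend role are illustrative):

```python
await client.security.create_role(
    role="movies-reader",
    body={
        "index_permissions": [
            {"index_patterns": ["movies*"], "allowed_actions": ["read"]}
        ]
    },
)
await client.security.create_role_mapping(
    role="movies-reader",
    body={"backend_roles": ["analysts"]},
)
```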
+ + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -432,18 +490,18 @@ async def get_tenant(self, tenant, params=None, headers=None): async def get_tenants(self, params=None, headers=None): """ Retrieves all tenants. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "tenants"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/tenants/", params=params, headers=headers ) @query_params() async def delete_tenant(self, tenant, params=None, headers=None): """ - Deletes the specified tenant. + Delete the specified tenant. + + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -459,6 +517,8 @@ async def delete_tenant(self, tenant, params=None, headers=None): async def create_tenant(self, tenant, body, params=None, headers=None): """ Creates or replaces the specified tenant. + + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -476,6 +536,8 @@ async def create_tenant(self, tenant, body, params=None, headers=None): async def patch_tenant(self, tenant, body, params=None, headers=None): """ Add, delete, or modify a single tenant. + + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -493,13 +555,15 @@ async def patch_tenant(self, tenant, body, params=None, headers=None): async def patch_tenants(self, body, params=None, headers=None): """ Add, delete, or modify multiple tenants in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "tenants"), + "/_plugins/_security/api/tenants/", params=params, headers=headers, body=body, @@ -508,11 +572,12 @@ async def patch_tenants(self, body, params=None, headers=None): @query_params() async def get_configuration(self, params=None, headers=None): """ - Retrieves the current Security plugin configuration in JSON format. + Returns the current Security plugin configuration in JSON format. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, ) @@ -520,14 +585,16 @@ async def get_configuration(self, params=None, headers=None): @query_params() async def update_configuration(self, body, params=None, headers=None): """ - Retrieves the current Security plugin configuration in JSON format. + Adds or updates the existing configuration using the REST API. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "securityconfig", "config"), + "/_plugins/_security/api/securityconfig/config", params=params, headers=headers, body=body, @@ -536,14 +603,16 @@ async def update_configuration(self, body, params=None, headers=None): @query_params() async def patch_configuration(self, body, params=None, headers=None): """ - Updates the existing configuration using the REST API. + A PATCH call is used to update the existing configuration using the REST API. 
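A cautious sketch of `patch_configuration` (the path and value are illustrative, and modifying `securityconfig` over REST must be explicitly enabled on the cluster for the call to succeed):

```python
await client.security.patch_configuration(
    body=[
        {
            "op": "replace",
            "path": "/config/dynamic/http/anonymous_auth_enabled",
            "value": False,
        }
    ]
)
```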
+ + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, body=body, @@ -555,6 +624,8 @@ async def get_distinguished_names( ): """ Retrieves all distinguished names in the allow list. + + """ return await self.transport.perform_request( "GET", @@ -565,14 +636,18 @@ async def get_distinguished_names( @query_params() async def update_distinguished_names( - self, cluster_name, body, params=None, headers=None + self, cluster_name, body=None, params=None, headers=None ): """ - Adds or updates the specified distinguished names in the cluster's or node's allow list. + Adds or updates the specified distinguished names in the cluster’s or node’s + allow list. + + """ - for param in (cluster_name, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") + if cluster_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'cluster_name'." + ) return await self.transport.perform_request( "PUT", @@ -585,11 +660,14 @@ async def update_distinguished_names( @query_params() async def delete_distinguished_names(self, cluster_name, params=None, headers=None): """ - Deletes all distinguished names in the specified cluster's or node's allow list. + Deletes all distinguished names in the specified cluster’s or node’s allow + list. + + """ if cluster_name in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'cluster-name'." + "Empty value passed for a required argument 'cluster_name'." ) return await self.transport.perform_request( @@ -602,25 +680,22 @@ async def delete_distinguished_names(self, cluster_name, params=None, headers=No @query_params() async def get_certificates(self, params=None, headers=None): """ - Retrieves the cluster's security certificates. + Retrieves the cluster’s security certificates. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "ssl", "certs"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/ssl/certs", params=params, headers=headers ) @query_params() async def reload_transport_certificates(self, params=None, headers=None): """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload transport layer communication certificates. + """ return await self.transport.perform_request( "PUT", - _make_path( - "_opendistro", "_security", "api", "ssl", "transport", "reloadcerts" - ), + "/_plugins/_security/api/ssl/transport/reloadcerts", params=params, headers=headers, ) @@ -628,11 +703,12 @@ async def reload_transport_certificates(self, params=None, headers=None): @query_params() async def reload_http_certificates(self, params=None, headers=None): """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload HTTP layer communication certificates. + """ return await self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "ssl", "http", "reloadcerts"), + "/_plugins/_security/api/ssl/http/reloadcerts", params=params, headers=headers, ) @@ -641,12 +717,10 @@ async def reload_http_certificates(self, params=None, headers=None): async def flush_cache(self, params=None, headers=None): """ Flushes the Security plugin user, authentication, and authorization cache. 
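A short sketch of the certificate and cache helpers above:

```python
certs = await client.security.get_certificates()  # cluster TLS certificate info
await client.security.flush_cache()               # drop cached auth/authz state
```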
+ """ return await self.transport.perform_request( - "DELETE", - _make_path("_plugins", "_security", "api", "cache"), - params=params, - headers=headers, + "DELETE", "/_plugins/_security/api/cache", params=params, headers=headers ) @query_params() @@ -662,13 +736,11 @@ async def health(self, params=None, headers=None): @query_params() async def get_audit_configuration(self, params=None, headers=None): """ - A GET call retrieves the audit configuration. + Retrieves the audit configuration. + """ return await self.transport.perform_request( - "GET", - _make_path("_opendistro", "_security", "api", "audit"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/audit", params=params, headers=headers ) @query_params() @@ -676,6 +748,7 @@ async def update_audit_configuration(self, body, params=None, headers=None): """ Updates the audit configuration. + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -692,13 +765,33 @@ async def update_audit_configuration(self, body, params=None, headers=None): async def patch_audit_configuration(self, body, params=None, headers=None): """ A PATCH call is used to update specified fields in the audit configuration. + + + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "PATCH", + "/_plugins/_security/api/audit", + params=params, + headers=headers, + body=body, + ) + + @query_params() + async def patch_distinguished_names(self, body, params=None, headers=None): + """ + Bulk update of distinguished names. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_opendistro", "_security", "api", "audit"), + "/_plugins/_security/api/nodesdn", params=params, headers=headers, body=body, diff --git a/opensearchpy/_async/client/security.pyi b/opensearchpy/_async/client/security.pyi index 7840445a..182d06c4 100644 --- a/opensearchpy/_async/client/security.pyi +++ b/opensearchpy/_async/client/security.pyi @@ -6,191 +6,734 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union -from .utils import NamespacedClient as NamespacedClient +from .utils import NamespacedClient class SecurityClient(NamespacedClient): async def get_account_details( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... 
+ self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def change_password( self, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_action_group( self, action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_action_groups( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def delete_action_group( self, action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
async def create_action_group( self, action_group: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_action_group( self, action_group: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_action_groups( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_user( self, username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_users( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
async def delete_user( self, username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def create_user( self, username: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_user( self, username: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_users( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
async def get_roles( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def delete_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def create_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_roles( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
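The regenerated role stubs make `body` keyword-only (note the `*` separator), so role calls now read like the sketch below. The role name and permission values are illustrative, and the body shape follows the Security plugin's role schema as commonly documented:

```python
# Inside a coroutine, with `client` an AsyncOpenSearch instance.
await client.security.create_role(
    "readall_custom",  # hypothetical role name
    body={
        "cluster_permissions": ["cluster_composite_ops_ro"],
        "index_permissions": [
            {"index_patterns": ["movies*"], "allowed_actions": ["read"]}
        ],
    },
)
print(await client.security.get_role("readall_custom"))
```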
async def get_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_role_mappings( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def delete_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def create_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
async def patch_role_mappings( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_tenant( self, tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_tenants( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def delete_tenant( self, tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def create_tenant( self, tenant: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
async def patch_tenant( self, tenant: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_tenants( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def update_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
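The tenant and configuration stubs follow the same keyword-only convention. A small sketch of the tenant round trip, with a hypothetical tenant name and a body shape to verify against the Security plugin docs:

```python
# Inside a coroutine, with `client` an AsyncOpenSearch instance.
await client.security.create_tenant(
    "analytics", body={"description": "Analytics workspace"}
)
print(await client.security.get_tenants())
await client.security.delete_tenant("analytics")
```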
async def get_distinguished_names( self, - cluster_name: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + cluster_name: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def update_distinguished_names( self, cluster_name: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + body: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def delete_distinguished_names( self, cluster_name: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def reload_transport_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... 
+ self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def reload_http_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def flush_cache( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def health( self, @@ -209,7 +752,20 @@ class SecurityClient(NamespacedClient): headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_audit_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def update_audit_configuration( self, @@ -229,5 +785,36 @@ class SecurityClient(NamespacedClient): headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def patch_audit_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... 
+ self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + async def patch_distinguished_names( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index 4f2acd6a..37f46820 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -25,25 +25,36 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") async def create(self, repository, snapshot, body=None, params=None, headers=None): """ Creates a snapshot in a repository. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. + :arg snapshot: Snapshot name. :arg body: The snapshot definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. 
""" for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -57,18 +68,19 @@ async def create(self, repository, snapshot, body=None, params=None, headers=Non body=body, ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") async def delete(self, repository, snapshot, params=None, headers=None): """ Deletes a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -82,33 +94,25 @@ async def delete(self, repository, snapshot, params=None, headers=None): ) @query_params( - "ignore_unavailable", - "include_repository", - "index_details", - "master_timeout", - "cluster_manager_timeout", - "verbose", + "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) async def get(self, repository, snapshot, params=None, headers=None): """ Returns information about a snapshot. - :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg include_repository: Whether to include the repository name - in the snapshot info. Defaults to true. - :arg index_details: Whether to include details of each index in - the snapshot, if those details are available. Defaults to false. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg verbose: Whether to show verbose snapshot info or only show - the basic info found in the repository index blob + the basic info found in the repository index blob. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -121,7 +125,7 @@ async def get(self, repository, snapshot, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def delete_repository(self, repository, params=None, headers=None): """ Deletes a repository. @@ -129,11 +133,12 @@ async def delete_repository(self, repository, params=None, headers=None): :arg repository: Name of the snapshot repository to unregister. Wildcard (`*`) patterns are supported. 
- :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -145,38 +150,40 @@ async def delete_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") async def get_repository(self, repository=None, params=None, headers=None): """ Returns information about a repository. - :arg repository: A comma-separated list of repository names + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_snapshot", repository), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout", "verify") + @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") async def create_repository(self, repository, body, params=None, headers=None): """ Creates a repository. - :arg repository: A repository name + :arg repository: Repository name. :arg body: The repository definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout - :arg verify: Whether to verify the repository after creation + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. + :arg verify: Whether to verify the repository after creation. """ for param in (repository, body): if param in SKIP_IN_PATH: @@ -190,21 +197,22 @@ async def create_repository(self, repository, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") async def restore(self, repository, snapshot, body=None, params=None, headers=None): """ Restores a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. 
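The repository methods accept the `verify` and `timeout` query parameters the same way. A minimal sketch of registering and inspecting a filesystem repository; the location path is a placeholder and must be allow-listed in `path.repo` on the cluster:

```python
# Inside a coroutine, with `client` an AsyncOpenSearch instance.
await client.snapshot.create_repository(
    repository="my_repository",
    body={"type": "fs", "settings": {"location": "/mnt/snapshots"}},
    verify=True,  # verify the repository right after registering it
)
print(await client.snapshot.get_repository(repository="my_repository"))
```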
+ :arg snapshot: Snapshot name. :arg body: Details of what to restore - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -218,21 +226,22 @@ async def restore(self, repository, snapshot, body=None, params=None, headers=No body=body, ) - @query_params("ignore_unavailable", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") async def status(self, repository=None, snapshot=None, params=None, headers=None): """ Returns information about the status of a snapshot. - :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", @@ -241,18 +250,19 @@ async def status(self, repository=None, snapshot=None, params=None, headers=None headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def verify_repository(self, repository, params=None, headers=None): """ Verifies a repository. - :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
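Restoring over an existing open index fails, so a typical restore sketch closes the index first and then polls or blocks on completion. Names reuse the earlier placeholder repository and snapshot:

```python
# Inside a coroutine, with `client` an AsyncOpenSearch instance.
await client.indices.close(index="movies")  # an open index cannot be restored over
result = await client.snapshot.restore(
    repository="my_repository",
    snapshot="my_snapshot",
    body={"indices": "movies"},
    wait_for_completion=True,
)
print(await client.snapshot.status(repository="my_repository", snapshot="my_snapshot"))
```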
""" if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -264,18 +274,19 @@ async def verify_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") async def cleanup_repository(self, repository, params=None, headers=None): """ Removes stale data from repository. - :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -287,7 +298,7 @@ async def cleanup_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") async def clone( self, repository, snapshot, target_snapshot, body, params=None, headers=None ): @@ -295,14 +306,15 @@ async def clone( Clones indices from one snapshot into another snapshot in the same repository. - :arg repository: A repository name - :arg snapshot: The name of the snapshot to clone from - :arg target_snapshot: The name of the cloned snapshot to create + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg target_snapshot: The name of the cloned snapshot to create. :arg body: The snapshot clone definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (repository, snapshot, target_snapshot, body): if param in SKIP_IN_PATH: @@ -315,56 +327,3 @@ async def clone( headers=headers, body=body, ) - - @query_params( - "blob_count", - "concurrency", - "detailed", - "early_read_node_count", - "max_blob_size", - "max_total_data_size", - "rare_action_probability", - "rarely_abort_writes", - "read_node_count", - "seed", - "timeout", - ) - async def repository_analyze(self, repository, params=None, headers=None): - """ - Analyzes a repository for correctness and performance - - - :arg repository: A repository name - :arg blob_count: Number of blobs to create during the test. - Defaults to 100. - :arg concurrency: Number of operations to run concurrently - during the test. Defaults to 10. - :arg detailed: Whether to return detailed results or a summary. - Defaults to 'false' so that only the summary is returned. - :arg early_read_node_count: Number of nodes on which to perform - an early read on a blob, i.e. before writing has completed. 
Early reads - are rare actions so the 'rare_action_probability' parameter is also - relevant. Defaults to 2. - :arg max_blob_size: Maximum size of a blob to create during the - test, e.g '1gb' or '100mb'. Defaults to '10mb'. - :arg max_total_data_size: Maximum total size of all blobs to - create during the test, e.g '1tb' or '100gb'. Defaults to '1gb'. - :arg rare_action_probability: Probability of taking a rare - action such as an early read or an overwrite. Defaults to 0.02. - :arg rarely_abort_writes: Whether to rarely abort writes before - they complete. Defaults to 'true'. - :arg read_node_count: Number of nodes on which to read a blob - after writing. Defaults to 10. - :arg seed: Seed for the random number generator used to create - the test workload. Defaults to a random value. - :arg timeout: Explicit operation timeout. Defaults to '30s'. - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'repository'.") - - return await self.transport.perform_request( - "POST", - _make_path("_snapshot", repository, "_analyze"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/_async/client/snapshot.pyi b/opensearchpy/_async/client/snapshot.pyi index 2167c97f..b065e86b 100644 --- a/opensearchpy/_async/client/snapshot.pyi +++ b/opensearchpy/_async/client/snapshot.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -35,8 +44,8 @@ class SnapshotClient(NamespacedClient): snapshot: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -56,8 +65,8 @@ class SnapshotClient(NamespacedClient): repository: Any, snapshot: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -76,11 +85,9 @@ class SnapshotClient(NamespacedClient): repository: Any, snapshot: Any, *, + cluster_manager_timeout: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., - include_repository: Optional[Any] = ..., - index_details: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., verbose: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -99,8 +106,8 @@ class SnapshotClient(NamespacedClient): self, repository: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -119,9 +126,9 @@ class SnapshotClient(NamespacedClient): self, *, repository: Optional[Any] = 
..., + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -140,8 +147,8 @@ class SnapshotClient(NamespacedClient): repository: Any, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., verify: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -163,8 +170,8 @@ class SnapshotClient(NamespacedClient): snapshot: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -184,9 +191,9 @@ class SnapshotClient(NamespacedClient): *, repository: Optional[Any] = ..., snapshot: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -204,8 +211,8 @@ class SnapshotClient(NamespacedClient): self, repository: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -224,8 +231,8 @@ class SnapshotClient(NamespacedClient): self, repository: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -247,36 +254,8 @@ class SnapshotClient(NamespacedClient): target_snapshot: Any, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def repository_analyze( - self, - repository: Any, - *, - blob_count: Optional[Any] = ..., - concurrency: Optional[Any] = ..., - detailed: Optional[Any] = ..., - early_read_node_count: Optional[Any] = ..., - max_blob_size: Optional[Any] = ..., - max_total_data_size: Optional[Any] = ..., - rare_action_probability: Optional[Any] = ..., - rarely_abort_writes: Optional[Any] = ..., - read_node_count: Optional[Any] = ..., - seed: Optional[Any] = ..., - timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 212b9e56..2bf73c6d 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -57,9 +57,10 @@ async def list(self, params=None, headers=None): :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information. + :arg detailed: Return detailed task information. Default is + false. :arg group_by: Group tasks by nodes or parent/child - relationships. Valid choices: nodes, parents, none + relationships. Valid choices are nodes, parents, none. :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all @@ -68,7 +69,7 @@ async def list(self, params=None, headers=None): (node_id:task_number). Set to -1 to return all. :arg timeout: Operation timeout. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. + operation has completed before returning. Default is false. """ return await self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers @@ -91,7 +92,7 @@ async def cancel(self, task_id=None, params=None, headers=None): :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. + operation has completed before returning. Default is false. """ return await self.transport.perform_request( "POST", @@ -110,7 +111,7 @@ async def get(self, task_id=None, params=None, headers=None): (node_id:task_number). :arg timeout: Operation timeout. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. + operation has completed before returning. Default is false. """ if task_id in SKIP_IN_PATH: warnings.warn( diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 8f976879..25a779fd 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -26,6 +26,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
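The tasks docstrings above now spell out the defaults (`detailed` and `wait_for_completion` default to false). A sketch of listing and cancelling tasks; the task id below is made up and only shows the `node_id:task_number` format:

```python
# Inside a coroutine, with `client` an AsyncOpenSearch instance.
tasks = await client.tasks.list(detailed=True, group_by="parents")
print(tasks)

# Cancel a long-running task by id.
await client.tasks.cancel(task_id="node-1:12345")
```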
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from __future__ import unicode_literals import logging @@ -275,25 +285,25 @@ def create(self, index, id, body, params=None, headers=None): with a same ID already exists in the index. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The document :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id, body): if param in SKIP_IN_PATH: @@ -320,49 +330,45 @@ def create(self, index, id, body, params=None, headers=None): ) def index(self, index, body, id=None, params=None, headers=None): """ - Creates or overwrites a document in an index. + Creates or updates a document in an index. - :arg index: The name of the index + :arg index: Index name. :arg body: The document - :arg id: Document ID - :arg if_primary_term: only perform the index operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the index operation if the last - operation that has changed the document has the specified sequence - number + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg op_type: Explicit operation type. Defaults to `index` for requests with an explicit document ID, and to `create`for requests - without an explicit document ID Valid choices: index, create + without an explicit document ID. Valid choices are index, create. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. 
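The distinction the `create` docstring draws, failing when the ID already exists, versus `index`, which creates or overwrites, can be seen in a short sync-client sketch; connection details are placeholders:

```python
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

# create() raises a ConflictError (HTTP 409) if a document with this ID exists.
client.create(index="movies", id="1", body={"title": "Beauty and the Beast", "year": 1991})

# index() with the same ID overwrites instead of failing.
client.index(index="movies", id="1", body={"title": "Beauty and the Beast", "year": 2017})
```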
:arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: When true, requires destination to be an - alias. Default is false - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + alias. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( "POST" if id in SKIP_IN_PATH else "PUT", - _make_path(index, doc_type, id), + _make_path(index, "_doc", id), params=params, headers=headers, body=body, @@ -386,29 +392,29 @@ def bulk(self, body, index=None, params=None, headers=None): :arg body: The operation definition and data (action-data pairs), separated by newlines - :arg index: Default index for items which don't provide one + :arg index: Default index for items which don't provide one. :arg _source: True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub- - request + request. :arg _source_excludes: Default list of fields to exclude from - the returned _source field, can be overridden on each sub-request + the returned _source field, can be overridden on each sub-request. :arg _source_includes: Default list of fields to extract and - return from the _source field, can be overridden on each sub-request + return from the _source field, can be overridden on each sub-request. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: Sets require_alias for all incoming - documents. Defaults to unset (false) - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout + documents. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the bulk operation. Defaults - to 1, meaning the primary shard only. 
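A sketch of the `bulk` action/data pairs described above; the client serializes a list of dicts into the newline-delimited wire format (index name and documents are made up):

```python
# refresh="wait_for" makes the writes visible to search before returning.
actions = [
    {"index": {"_index": "my-index", "_id": "2"}},
    {"title": "The Big Short", "year": 2015},
    {"delete": {"_index": "my-index", "_id": "3"}},
]
response = client.bulk(body=actions, refresh="wait_for")
print(response["errors"])  # False when every item succeeded
```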
Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -428,9 +434,9 @@ def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): Explicitly clears the search context for a scroll. - :arg body: A comma-separated list of scroll IDs to clear if none + :arg body: Comma-separated list of scroll IDs to clear if none was specified via the scroll_id parameter - :arg scroll_id: A comma-separated list of scroll IDs to clear + :arg scroll_id: Comma-separated list of scroll IDs to clear. """ if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("You need to supply scroll_id or body.") @@ -464,37 +470,38 @@ def count(self, body=None, index=None, params=None, headers=None): Returns number of documents matching a query. - :arg body: A query to restrict the results specified with the + :arg body: Query to restrict the results specified with the Query DSL (optional) - :arg index: A comma-separated list of indices to restrict the - results + :arg index: Comma-separated list of indices to restrict the + results. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg min_score: Include only documents with a specific `_score` - value in the result + value in the result. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg routing: A comma-separated list of specific routing values - :arg terminate_after: The maximum count for each shard, upon - reaching which the query execution will terminate early + be performed on. Default is random. 
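A short sketch of `count` under the parameters documented above, assuming the hypothetical `my-index` from earlier:

```python
# Count documents matching a Query DSL body; min_score and routing are
# available as keyword arguments if needed.
response = client.count(
    index="my-index",
    body={"query": {"range": {"year": {"gte": 2000}}}},
)
print(response["count"])
```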
+ :arg q: Query in the Lucene query string syntax. + :arg routing: Comma-separated list of specific routing values. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. """ return self.transport.perform_request( "POST", @@ -519,37 +526,33 @@ def delete(self, index, id, params=None, headers=None): Removes a document from the index. - :arg index: The name of the index - :arg id: The document ID - :arg if_primary_term: only perform the delete operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the delete operation if the last - operation that has changed the document has the specified sequence - number + :arg index: Index name. + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the delete operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( - "DELETE", _make_path(index, doc_type, id), params=params, headers=headers + "DELETE", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -592,76 +595,76 @@ def delete_by_query(self, index, body, params=None, headers=None): Deletes documents matching the provided query. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. 
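The `if_seq_no`/`if_primary_term` parameters above enable optimistic concurrency control on deletes; a sketch using the made-up document from earlier:

```python
# Delete only if the document has not changed since we last read it.
doc = client.get(index="my-index", id="1")
client.delete(
    index="my-index",
    id="1",
    if_seq_no=doc["_seq_no"],
    if_primary_term=doc["_primary_term"],
)
```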
:arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string - :arg conflicts: What to do when the delete by query hits version - conflicts? Valid choices: abort, proceed Default: abort + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg conflicts: What to do when the operation encounters version + conflicts?. Valid choices are abort, proceed. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg from_: Starting offset (default: 0) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg from_: Starting offset. Default is 0. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_docs: Maximum number of documents to process (default: - all documents) + all documents). :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg refresh: Should the effected indexes be refreshed? + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg refresh: Refresh the shard containing the document before + performing the operation. :arg request_cache: Specify if request cache should be used for - this request or not, defaults to index level setting + this request or not, defaults to index level setting. :arg requests_per_second: The throttle for this request in sub- - requests per second. -1 means no throttle. - :arg routing: A comma-separated list of specific routing values + requests per second. -1 means no throttle. Default is 0. + :arg routing: Comma-separated list of specific routing values. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search - :arg scroll_size: Size on the scroll request powering the delete - by query Default: 100 + should be maintained for scrolled search. + :arg scroll_size: Size on the scroll request powering the + operation. 
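A sketch of `delete_by_query` combining the `conflicts`, `slices`, and throttling parameters documented above (query and index are made up):

```python
# conflicts="proceed" skips version conflicts instead of aborting the run;
# slices="auto" lets the cluster choose how to parallelize the task.
response = client.delete_by_query(
    index="my-index",
    body={"query": {"match": {"title": "obsolete"}}},
    conflicts="proceed",
    slices="auto",
)
print(response["deleted"])
```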
Default is 100. :arg search_timeout: Explicit timeout for each search request. Defaults to no timeout. - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch - :arg size: Deprecated, please use `max_docs` instead + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. + :arg size: Deprecated, please use `max_docs` instead. :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default: 1 - :arg sort: A comma-separated list of : pairs + set to `auto`. Default is 1. + :arg sort: Comma-separated list of : pairs. :arg stats: Specific 'tag' of the request for logging and - statistical purposes + statistical purposes. :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default: 1m - :arg version: Specify whether to return document version as part - of a hit + shards that are unavailable. Default is 1m. + :arg version: Whether to return document version as part of a + hit. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the delete by query - operation. Defaults to 1, meaning the primary shard only. Set to `all` - for all shard copies, otherwise set to any non-negative value less than - or equal to the total number of copies for the shard (number of replicas - + 1) - :arg wait_for_completion: Should the request should block until - the delete by query is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -686,9 +689,9 @@ def delete_by_query_rethrottle(self, task_id, params=None, headers=None): operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -700,16 +703,19 @@ def delete_by_query_rethrottle(self, task_id, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def delete_script(self, id, params=None, headers=None): """ Deletes a script. - :arg id: Script ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
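Sketches for the rethrottle and `delete_script` calls above; the task id is a placeholder and the script id assumes a previously stored script:

```python
# Throttle a running delete-by-query task to 50 sub-requests per second.
# client.delete_by_query_rethrottle(task_id="node_id:task_number", requests_per_second=50)

# Delete a stored script, using the inclusive-language timeout parameter.
client.delete_script(id="my-stored-script", cluster_manager_timeout="30s")
```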
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -735,35 +741,33 @@ def exists(self, index, id, params=None, headers=None): Returns information about whether a document exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( - "HEAD", _make_path(index, doc_type, id), params=params, headers=headers + "HEAD", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -782,24 +786,24 @@ def exists_source(self, index, id, params=None, headers=None): Returns information about whether a document source exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. 
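`exists` issues a HEAD request and the client returns a boolean, so the check documented above reads naturally in Python:

```python
# realtime=True checks the live document rather than the last search view.
if client.exists(index="my-index", id="2", realtime=True):
    print("document 2 is present")
```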
:arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -830,30 +834,30 @@ def explain(self, index, id, body=None, params=None, headers=None): Returns information about why a specific matches (or doesn't match) a query. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The query definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg analyze_wildcard: Specify whether wildcards and prefix - queries in the query string query should be analyzed (default: false) - :arg analyzer: The analyzer for the query string query + queries in the query string query should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR - :arg df: The default field for query string query (default: - _all) + query (AND or OR). Valid choices are AND, OR. + :arg df: The default field for query string query. Default is + _all. :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -879,19 +883,19 @@ def field_caps(self, body=None, index=None, params=None, headers=None): :arg body: An index filter specified with the Query DSL - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. 
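A sketch of `explain` for the scoring question the docstring above answers, with a made-up query:

```python
# Explain why document 2 does (or does not) match the query.
response = client.explain(
    index="my-index",
    id="2",
    body={"query": {"match": {"title": "short"}}},
)
print(response["matched"], response.get("explanation", {}).get("value"))
```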
Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of field names + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fields: Comma-separated list of field names. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_unmapped: Indicates whether unmapped fields should - be included in the response. + be included in the response. Default is false. """ return self.transport.perform_request( "POST", @@ -918,46 +922,47 @@ def get(self, index, id, params=None, headers=None): Returns a document. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( - "GET", _make_path(index, doc_type, id), params=params, headers=headers + "GET", _make_path(index, "_doc", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") def get_script(self, id, params=None, headers=None): """ Returns a script. - :arg id: Script ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -982,24 +987,24 @@ def get_source(self, index, id, params=None, headers=None): Returns the source of a document. 
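Sketches for `field_caps` and `get` with the parameters documented above; the field pattern and source filters are illustrative:

```python
# Field capabilities for every field; unmapped fields included explicitly.
caps = client.field_caps(index="my-index", fields="*", include_unmapped=True)

# Fetch a document, returning only selected _source fields.
doc = client.get(index="my-index", id="2", _source_includes="title,year")
```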
- :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -1027,24 +1032,24 @@ def mget(self, body, index=None, params=None, headers=None): :arg body: Document identifiers; can be either `docs` - (containing full document information) or `ids` (when index and type is - provided in the URL. - :arg index: The name of the index + (containing full document information) or `ids` (when index is provided + in the URL. + :arg index: Index name. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1073,30 +1078,31 @@ def msearch(self, body, index=None, params=None, headers=None): :arg body: The request definitions (metadata-search request definition pairs), separated by newlines - :arg index: A comma-separated list of index names to use as - default + :arg index: Comma-separated list of indices to use as default. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. 
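A sketch of `get_source` and `mget` as documented above; the `ids` form assumes the index is given in the call:

```python
# Raw _source only, without the metadata envelope.
source = client.get_source(index="my-index", id="2")

# Several documents in one round trip.
docs = client.mget(index="my-index", body={"ids": ["1", "2"]})
```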
:arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute + concurrent searches the multi search api will execute. :arg max_concurrent_shard_requests: The number of concurrent shard requests each sub search executes concurrently per node. This value should be used to limit the impact of the search on the cluster in - order to limit the number of concurrent shard requests Default: 5 - :arg pre_filter_shard_size: A threshold that enforces a pre- - filter roundtrip to prefilter search shards based on query rewriting if - the number of shards the search request expands to exceeds the - threshold. This filter roundtrip can limit the number of shards - significantly if for instance a shard can not match any documents based - on its rewrite method ie. if date filters are mandatory to match but the - shard bounds and the query are disjoint. + order to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1124,19 +1130,20 @@ def msearch_template(self, body, index=None, params=None, headers=None): :arg body: The request definitions (metadata-search request definition pairs), separated by newlines - :arg index: A comma-separated list of index names to use as - default + :arg index: Comma-separated list of indices to use as default. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute + concurrent searches the multi search api will execute. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. 
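A sketch of the `msearch` header/body pairs described above; as with `bulk`, a list of dicts is serialized to the newline-delimited format:

```python
body = [
    {"index": "my-index"},
    {"query": {"match_all": {}}, "size": 1},
    {"index": "my-index"},
    {"query": {"match": {"title": "short"}}},
]
responses = client.msearch(body=body)
for item in responses["responses"]:
    print(item["hits"]["total"])
```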
""" if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1176,34 +1183,34 @@ def mtermvectors(self, body=None, index=None, params=None, headers=None): :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Applies to all returned documents unless otherwise specified - in body "params" or "docs". Default: True - :arg fields: A comma-separated list of fields to return. Applies - to all returned documents unless otherwise specified in body "params" or - "docs". - :arg ids: A comma-separated list of documents ids. You must - define ids as parameter or set "ids" or "docs" in the request body + in body 'params' or 'docs'. Default is True. + :arg fields: Comma-separated list of fields to return. Applies + to all returned documents unless otherwise specified in body 'params' or + 'docs'. + :arg ids: Comma-separated list of documents ids. You must define + ids as parameter or set 'ids' or 'docs' in the request body. :arg offsets: Specifies if term offsets should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg payloads: Specifies if term payloads should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg positions: Specifies if term positions should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg preference: Specify the node or shard the operation should - be performed on (default: random) .Applies to all returned documents - unless otherwise specified in body "params" or "docs". + be performed on. Applies to all returned documents unless otherwise + specified in body 'params' or 'docs'. Default is random. :arg realtime: Specifies if requests are real-time as opposed to - near-real-time (default: true). - :arg routing: Specific routing value. Applies to all returned - documents unless otherwise specified in body "params" or "docs". + near-real-time. Default is True. + :arg routing: Routing value. Applies to all returned documents + unless otherwise specified in body 'params' or 'docs'. :arg term_statistics: Specifies if total term frequency and document frequency should be returned. Applies to all returned documents - unless otherwise specified in body "params" or "docs". - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + unless otherwise specified in body 'params' or 'docs'. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ path = _make_path(index, "_mtermvectors") @@ -1211,18 +1218,21 @@ def mtermvectors(self, body=None, index=None, params=None, headers=None): "POST", path, params=params, headers=headers, body=body ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def put_script(self, id, body, context=None, params=None, headers=None): """ Creates or updates a script. - :arg id: Script ID + :arg id: Script ID. 
:arg body: The document - :arg context: Context name to compile script against - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg context: Script context. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (id, body): if param in SKIP_IN_PATH: @@ -1242,28 +1252,23 @@ def put_script(self, id, body, context=None, params=None, headers=None): def rank_eval(self, body, index=None, params=None, headers=None): """ Allows to evaluate the quality of ranked search results over a set of typical - search queries - - - .. warning:: + search queries. - This API is **experimental** so may include breaking changes - or be removed in a future version :arg body: The ranking evaluation search definition, including search requests, document ratings and ranking metric definition. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be ignored when unavailable (missing or closed). + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1296,24 +1301,24 @@ def reindex(self, body, params=None, headers=None): :arg body: The search definition using the Query DSL and the prototype for the index request. :arg max_docs: Maximum number of documents to process (default: - all documents) - :arg refresh: Should the affected indexes be refreshed? - :arg requests_per_second: The throttle to set on this request in - sub-requests per second. -1 means no throttle. - :arg scroll: Control how long to keep the search context alive - Default: 5m + all documents). + :arg refresh: Should the affected indexes be refreshed?. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. Default is 0. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default: 1 + set to `auto`. Default is 1. :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. 
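Sketches for `put_script` and `reindex` under the parameters documented above; the script id, script source, and index names are made up:

```python
# Store a painless script under an id for later use in updates or queries.
client.put_script(
    id="add-year",
    body={"script": {"lang": "painless", "source": "ctx._source.year += params.n"}},
)

# Copy documents between indices; wait_for_completion=False returns a task
# to poll instead of blocking (the default is True, per the docstring).
task = client.reindex(
    body={"source": {"index": "my-index"}, "dest": {"index": "my-index-v2"}},
    wait_for_completion=False,
)
print(task["task"])
```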
Default: 1m + shards that are unavailable. Default is 1m. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the reindex operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) - :arg wait_for_completion: Should the request should block until - the reindex is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1328,9 +1333,9 @@ def reindex_rethrottle(self, task_id, params=None, headers=None): Changes the number of requests per second for a particular Reindex operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1349,7 +1354,7 @@ def render_search_template(self, body=None, id=None, params=None, headers=None): :arg body: The search definition template and its params - :arg id: The id of the stored search template + :arg id: The id of the stored search template. """ return self.transport.perform_request( "POST", @@ -1362,13 +1367,8 @@ def render_search_template(self, body=None, id=None, params=None, headers=None): @query_params() def scripts_painless_execute(self, body=None, params=None, headers=None): """ - Allows an arbitrary script to be executed and a result to be returned - - - .. warning:: + Allows an arbitrary script to be executed and a result to be returned. - This API is **experimental** so may include breaking changes - or be removed in a future version :arg body: The script to execute """ @@ -1388,11 +1388,12 @@ def scroll(self, body=None, scroll_id=None, params=None, headers=None): :arg body: The scroll ID if not passed by URL or query parameter. - :arg scroll_id: The scroll ID for scrolled search + :arg scroll_id: Scroll ID. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response + be rendered as an integer or an object in the rest search response. + Default is false. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search + should be maintained for scrolled search. 
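A paging sketch for the `scroll` API documented above, keeping each view of the index alive for two minutes:

```python
page = client.search(
    index="my-index",
    scroll="2m",
    size=100,
    body={"query": {"match_all": {}}},
)
sid = page["_scroll_id"]
while page["hits"]["hits"]:
    # ... process page["hits"]["hits"] here ...
    page = client.scroll(scroll_id=sid, scroll="2m")
    sid = page["_scroll_id"]
client.clear_scroll(scroll_id=sid)  # release the search context
```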
""" if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("You need to supply scroll_id or body.") @@ -1425,7 +1426,6 @@ def scroll(self, body=None, scroll_id=None, params=None, headers=None): "ignore_unavailable", "lenient", "max_concurrent_shard_requests", - "min_compatible_shard_node", "pre_filter_shard_size", "preference", "q", @@ -1456,101 +1456,99 @@ def search(self, body=None, index=None, params=None, headers=None): :arg body: The search definition using the Query DSL - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg allow_partial_search_results: Indicate if an error should - be returned if there is a partial search failure or timeout Default: - True + be returned if there is a partial search failure or timeout. Default is + True. :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg batched_reduce_size: The number of shard results that should be reduced at once on the coordinating node. This value should be used as a protection mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large. - Default: 512 + Default is 512. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string - :arg docvalue_fields: A comma-separated list of fields to return - as the docvalue representation of a field for each hit + given in the query string. + :arg docvalue_fields: Comma-separated list of fields to return + as the docvalue representation of a field for each hit. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit - :arg from_: Starting offset (default: 0) + about score computation as part of a hit. + :arg from_: Starting offset. Default is 0. 
:arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order - to limit the number of concurrent shard requests Default: 5 - :arg min_compatible_shard_node: The minimum compatible version - that all shards involved in search should have for this request to be - successful - :arg pre_filter_shard_size: A threshold that enforces a pre- - filter roundtrip to prefilter search shards based on query rewriting if - the number of shards the search request expands to exceeds the - threshold. This filter roundtrip can limit the number of shards - significantly if for instance a shard can not match any documents based - on its rewrite method ie. if date filters are mandatory to match but the - shard bounds and the query are disjoint. + to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. :arg request_cache: Specify if request cache should be used for - this request or not, defaults to index level setting + this request or not, defaults to index level setting. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg routing: A comma-separated list of specific routing values + be rendered as an integer or an object in the rest search response. + Default is false. + :arg routing: Comma-separated list of specific routing values. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be maintained for scrolled search. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. :arg seq_no_primary_term: Specify whether to return sequence - number and primary term of the last modification of each hit - :arg size: Number of hits to return (default: 10) - :arg sort: A comma-separated list of : pairs + number and primary term of the last modification of each hit. + :arg size: Number of hits to return. Default is 10. + :arg sort: Comma-separated list of : pairs. 
:arg stats: Specific 'tag' of the request for logging and - statistical purposes - :arg stored_fields: A comma-separated list of stored fields to - return as part of a hit - :arg suggest_field: Specify which field to use for suggestions - :arg suggest_mode: Specify suggest mode Valid choices: missing, - popular, always Default: missing - :arg suggest_size: How many suggestions to return in response + statistical purposes. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg suggest_field: Specify which field to use for suggestions. + :arg suggest_mode: Specify suggest mode. Valid choices are + missing, popular, always. + :arg suggest_size: How many suggestions to return in response. :arg suggest_text: The source text for which the suggestions - should be returned + should be returned. :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. :arg track_scores: Whether to calculate and return scores even - if they are not used for sorting + if they are not used for sorting. :arg track_total_hits: Indicate if the number of documents that - match the query should be tracked + match the query should be tracked. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response - :arg version: Specify whether to return document version as part - of a hit + should be prefixed by their respective types in the response. + :arg version: Whether to return document version as part of a + hit. """ # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -1578,21 +1576,21 @@ def search_shards(self, index=None, params=None, headers=None): executed against. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + be performed on. Default is random. + :arg routing: Routing value. 
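A sketch tying together the `search` parameters documented above; note the trailing underscore on `from_`, since `from` is reserved in Python:

```python
response = client.search(
    index="my-index",
    body={"query": {"match": {"title": "short"}}},
    from_=0,
    size=10,
    sort="year:desc",
    track_total_hits=True,
)
print(response["hits"]["total"])

# Report which shards a search would touch, without executing it.
shards = client.search_shards(index="my-index")
```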
""" return self.transport.perform_request( "GET", _make_path(index, "_search_shards"), params=params, headers=headers @@ -1619,35 +1617,37 @@ def search_template(self, body, index=None, params=None, headers=None): :arg body: The search definition template and its params - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit + about score computation as part of a hit. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg profile: Specify whether to profile the query execution + be performed on. Default is random. + :arg profile: Specify whether to profile the query execution. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg routing: A comma-separated list of specific routing values + be rendered as an integer or an object in the rest search response. + Default is false. + :arg routing: Comma-separated list of specific routing values. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be maintained for scrolled search. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1682,28 +1682,28 @@ def termvectors(self, index, body=None, id=None, params=None, headers=None): :arg index: The index in which the document resides. :arg body: Define parameters and or supply a document to get termvectors for. See documentation. - :arg id: The id of the document, when not specified a doc param - should be supplied. + :arg id: Document ID. When not specified a doc param should be + supplied. 
:arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be - returned. Default: True - :arg fields: A comma-separated list of fields to return. + returned. Default is True. + :arg fields: Comma-separated list of fields to return. :arg offsets: Specifies if term offsets should be returned. - Default: True + Default is True. :arg payloads: Specifies if term payloads should be returned. - Default: True + Default is True. :arg positions: Specifies if term positions should be returned. - Default: True + Default is True. :arg preference: Specify the node or shard the operation should - be performed on (default: random). + be performed on. Default is random. :arg realtime: Specifies if request is real-time as opposed to - near-real-time (default: true). - :arg routing: Specific routing value. + near-real-time. Default is True. + :arg routing: Routing value. :arg term_statistics: Specifies if total term frequency and - document frequency should be returned. - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + document frequency should be returned. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -1733,38 +1733,36 @@ def update(self, index, id, body, params=None, headers=None): Updates a document with a script or partial document. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The request definition requires either `script` or partial `doc` :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field - :arg if_primary_term: only perform the update operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the update operation if the last - operation that has changed the document has the specified sequence - number - :arg lang: The script language (default: painless) + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. + :arg lang: The script language. Default is painless. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg require_alias: When true, requires destination is an alias. - Default is false + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg require_alias: When true, requires destination to be an + alias. Default is false. 
:arg retry_on_conflict: Specify how many times should the
- operation be retried when a conflict occurs (default: 0)
- :arg routing: Specific routing value
- :arg timeout: Explicit operation timeout
+ operation be retried when a conflict occurs. Default is 0.
+ :arg routing: Routing value.
+ :arg timeout: Operation timeout.
:arg wait_for_active_shards: Sets the number of shard copies
- that must be active before proceeding with the update operation.
- Defaults to 1, meaning the primary shard only. Set to `all` for all
- shard copies, otherwise set to any non-negative value less than or equal
- to the total number of copies for the shard (number of replicas + 1)
+ that must be active before proceeding with the operation. Defaults to 1,
+ meaning the primary shard only. Set to `all` for all shard copies,
+ otherwise set to any non-negative value less than or equal to the total
+ number of copies for the shard (number of replicas + 1). Default is 1.
"""
for param in (index, id, body):
if param in SKIP_IN_PATH:
@@ -1809,7 +1807,6 @@ def update(self, index, id, body, params=None, headers=None):
"terminate_after",
"timeout",
"version",
- "version_type",
"wait_for_active_shards",
"wait_for_completion",
)
@@ -1819,81 +1816,77 @@ def update_by_query(self, index, body=None, params=None, headers=None):
for example to pick up a mapping change.


- :arg index: A comma-separated list of index names to search; use
- `_all` or empty string to perform the operation on all indices
+ :arg index: Comma-separated list of indices; use `_all` or empty
+ string to perform the operation on all indices.
:arg body: The search definition using the Query DSL
- search; leave empty to perform the operation on all types
:arg _source: True or false to return the _source field or not,
- or a list of fields to return
- :arg _source_excludes: A list of fields to exclude from the
- returned _source field
- :arg _source_includes: A list of fields to extract and return
- from the _source field
+ or a list of fields to return.
+ :arg _source_excludes: List of fields to exclude from the
+ returned _source field.
+ :arg _source_includes: List of fields to extract and return from
+ the _source field.
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
- string or when no indices have been specified)
+ string or when no indices have been specified).
:arg analyze_wildcard: Specify whether wildcard and prefix
- queries should be analyzed (default: false)
- :arg analyzer: The analyzer to use for the query string
- :arg conflicts: What to do when the update by query hits version
- conflicts? Valid choices: abort, proceed Default: abort
+ queries should be analyzed. Default is false.
+ :arg analyzer: The analyzer to use for the query string.
+ :arg conflicts: What to do when the operation encounters version
+ conflicts. Valid choices are abort, proceed.
:arg default_operator: The default operator for query string
- query (AND or OR) Valid choices: AND, OR Default: OR
+ query (AND or OR). Valid choices are AND, OR.
:arg df: The field to use as default where no field prefix is
- given in the query string
+ given in the query string.
:arg expand_wildcards: Whether to expand wildcard expression to
- concrete indices that are open, closed or both. Valid choices: open,
- closed, hidden, none, all Default: open
+ concrete indices that are open, closed or both. Valid choices are all,
+ open, closed, hidden, none.
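To ground the `update` arguments documented above, a hedged sketch of a partial-document update (the index name and document id are illustrative):

```python
client.update(
    index="movies",
    id="1",
    body={"doc": {"year": 1994}},  # partial document; a `script` body is the alternative
    retry_on_conflict=3,           # retry the update up to 3 times on version conflicts
    refresh="wait_for",            # wait for a refresh so the change is visible to search
)
```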
+ :arg from_: Starting offset. Default is 0.
:arg ignore_unavailable: Whether specified concrete indices
- should be ignored when unavailable (missing or closed)
+ should be ignored when unavailable (missing or closed).
:arg lenient: Specify whether format-based query failures (such
- as providing text to a numeric field) should be ignored
+ as providing text to a numeric field) should be ignored.
:arg max_docs: Maximum number of documents to process (default:
- all documents)
- :arg pipeline: Ingest pipeline to set on index requests made by
- this action. (default: none)
+ all documents).
+ :arg pipeline: The pipeline id to preprocess incoming documents
+ with.
:arg preference: Specify the node or shard the operation should
- be performed on (default: random)
- :arg q: Query in the Lucene query string syntax
- :arg refresh: Should the affected indexes be refreshed?
+ be performed on. Default is random.
+ :arg q: Query in the Lucene query string syntax.
+ :arg refresh: Whether the affected indexes should be refreshed.
:arg request_cache: Specify if request cache should be used for
- this request or not, defaults to index level setting
- :arg requests_per_second: The throttle to set on this request in
- sub-requests per second. -1 means no throttle.
- :arg routing: A comma-separated list of specific routing values
+ this request or not, defaults to index level setting.
+ :arg requests_per_second: The throttle for this request in sub-
+ requests per second. -1 means no throttle. Default is 0.
+ :arg routing: Comma-separated list of specific routing values.
:arg scroll: Specify how long a consistent view of the index
- should be maintained for scrolled search
- :arg scroll_size: Size on the scroll request powering the update
- by query Default: 100
+ should be maintained for scrolled search.
+ :arg scroll_size: Size on the scroll request powering the
+ operation. Default is 100.
:arg search_timeout: Explicit timeout for each search request.
Defaults to no timeout.
- :arg search_type: Search operation type Valid choices:
- query_then_fetch, dfs_query_then_fetch
- :arg size: Deprecated, please use `max_docs` instead
+ :arg search_type: Search operation type. Valid choices are
+ query_then_fetch, dfs_query_then_fetch.
+ :arg size: Deprecated, please use `max_docs` instead.
:arg slices: The number of slices this task should be divided
into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be
- set to `auto`. Default: 1
- :arg sort: A comma-separated list of <field>:<direction> pairs
+ set to `auto`. Default is 1.
+ :arg sort: Comma-separated list of <field>:<direction> pairs.
:arg stats: Specific 'tag' of the request for logging and
- statistical purposes
+ statistical purposes.
:arg terminate_after: The maximum number of documents to collect
for each shard, upon reaching which the query execution will terminate
early.
:arg timeout: Time each individual bulk request should wait for
- shards that are unavailable. Default: 1m
- :arg version: Specify whether to return document version as part
- of a hit
- :arg version_type: Should the document increment the version
- number (internal) on hit or not (reindex)
+ shards that are unavailable. Default is 1m.
+ :arg version: Whether to return document version as part of a
+ hit.
:arg wait_for_active_shards: Sets the number of shard copies
- that must be active before proceeding with the update by query
- operation. Defaults to 1, meaning the primary shard only.
Set to `all` - for all shard copies, otherwise set to any non-negative value less than - or equal to the total number of copies for the shard (number of replicas - + 1) - :arg wait_for_completion: Should the request should block until - the update by query operation is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -1917,9 +1910,9 @@ def update_by_query_rethrottle(self, task_id, params=None, headers=None): operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1936,11 +1929,6 @@ def get_script_context(self, params=None, headers=None): """ Returns all script contexts. - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version """ return self.transport.perform_request( "GET", "/_script_context", params=params, headers=headers @@ -1949,13 +1937,8 @@ def get_script_context(self, params=None, headers=None): @query_params() def get_script_languages(self, params=None, headers=None): """ - Returns available script types, languages and contexts - - - .. warning:: + Returns available script types, languages and contexts. - This API is **experimental** so may include breaking changes - or be removed in a future version """ return self.transport.perform_request( "GET", "/_script_language", params=params, headers=headers @@ -1978,11 +1961,11 @@ def create_pit(self, index, params=None, headers=None): :arg allow_partial_pit_creation: Allow if point in time can be created with partial failures. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg keep_alive: Specify the keep alive for point in time. :arg preference: Specify the node or shard the operation should - be performed on. + be performed on. Default is random. :arg routing: Comma-separated list of specific routing values. """ if index in SKIP_IN_PATH: @@ -2011,7 +1994,7 @@ def delete_pit(self, body=None, params=None, headers=None): Deletes one or more point in time searches based on the IDs passed. - :arg body: a point-in-time id to delete + :arg body: The point-in-time ids to be deleted """ return self.transport.perform_request( "DELETE", @@ -2025,36 +2008,8 @@ def delete_pit(self, body=None, params=None, headers=None): def get_all_pits(self, params=None, headers=None): """ Lists all active point in time searches. 
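Pulling the preceding docstrings together, here is a sketch of a throttled `update_by_query` followed by the point-in-time lifecycle; the index name, the script, and the `delete_pit` body shape are assumptions rather than something this patch specifies:

```python
# Fire-and-forget update-by-query; with wait_for_completion=False the
# response carries a task id that can be rethrottled later.
task = client.update_by_query(
    index="movies",
    body={"query": {"match_all": {}}, "script": {"source": "ctx._source.seen = true"}},
    conflicts="proceed",
    wait_for_completion=False,
)
client.update_by_query_rethrottle(task_id=task["task"], requests_per_second=10)

# Point-in-time lifecycle: create, list, then delete by id.
pit = client.create_pit(index="movies", keep_alive="1m")
print(client.get_all_pits())
client.delete_pit(body={"pit_id": [pit["pit_id"]]})
```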
- """ - return self.transport.perform_request( - "GET", "/_search/point_in_time/_all", params=params, headers=headers - ) - - @query_params() - def terms_enum(self, index, body=None, params=None, headers=None): - """ - The terms enum API can be used to discover terms in the index that begin with - the provided string. It is designed for low-latency look-ups used in auto- - complete scenarios. - - - .. warning:: - This API is **beta** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices - :arg body: field name, string which is the prefix expected in - matching terms, timeout and size for max number of results """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - return self.transport.perform_request( - "POST", - _make_path(index, "_terms_enum"), - params=params, - headers=headers, - body=body, + "GET", "/_search/point_in_time/_all", params=params, headers=headers ) diff --git a/opensearchpy/client/__init__.pyi b/opensearchpy/client/__init__.pyi index e1d1e359..5bf53ec7 100644 --- a/opensearchpy/client/__init__.pyi +++ b/opensearchpy/client/__init__.pyi @@ -25,6 +25,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from __future__ import unicode_literals import logging @@ -59,7 +68,10 @@ class OpenSearch(object): snapshot: SnapshotClient tasks: TasksClient def __init__( - self, hosts: Any = ..., transport_class: Type[Transport] = ..., **kwargs: Any + self, + hosts: Any = ..., + transport_class: Type[Transport] = ..., + **kwargs: Any, ) -> None: ... def __repr__(self) -> str: ... def __enter__(self) -> "OpenSearch": ... 
@@ -330,8 +342,8 @@ class OpenSearch(object): self, id: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -487,8 +499,8 @@ class OpenSearch(object): self, id: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -639,8 +651,8 @@ class OpenSearch(object): *, body: Any, context: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -799,7 +811,6 @@ class OpenSearch(object): ignore_unavailable: Optional[Any] = ..., lenient: Optional[Any] = ..., max_concurrent_shard_requests: Optional[Any] = ..., - min_compatible_shard_node: Optional[Any] = ..., pre_filter_shard_size: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., @@ -988,7 +999,6 @@ class OpenSearch(object): terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., version: Optional[Any] = ..., - version_type: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -1125,21 +1135,3 @@ class OpenSearch(object): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def terms_enum( - self, - index: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/_patch.pyi b/opensearchpy/client/_patch.pyi index be6e12a0..b01423b9 100644 --- a/opensearchpy/client/_patch.pyi +++ b/opensearchpy/client/_patch.pyi @@ -6,6 +6,7 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. + from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union def list_all_point_in_time( diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 8dac68cd..cd0c42cf 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -48,17 +48,17 @@ def aliases(self, name=None, params=None, headers=None): :arg name: Comma-separated list of alias names. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. 
:arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers @@ -83,22 +83,22 @@ def allocation(self, node_id=None, params=None, headers=None): :arg node_id: Comma-separated list of node IDs or names to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", @@ -119,10 +119,10 @@ def count(self, index=None, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers @@ -137,13 +137,13 @@ def health(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg ts: Set to false to disable timestamping. (default: True) - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg ts: Set to false to disable timestamping. Default is True. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers @@ -155,7 +155,7 @@ def help(self, params=None, headers=None): Returns help for the Cat APIs. 
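A quick sketch of the cat endpoints covered so far; passing `format="json"` returns structured output, while the default is the plain-text table (index and alias names are hypothetical):

```python
print(client.cat.health(v=True, time="s"))              # one-line cluster summary
print(client.cat.count(index="movies", format="json"))  # document count as JSON
print(client.cat.aliases(name="my-alias*", v=True))     # aliases matching a pattern
```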
- :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. """ @@ -187,35 +187,35 @@ def indices(self, index=None, params=None, headers=None): :arg index: Comma-separated list of indices to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg health: Health status ('green', 'yellow', or 'red') to - filter only indices matching the specified health status. Valid - choices: green, yellow, red - :arg help: Return help information. (default: false) + filter only indices matching the specified health status. Valid choices + are green, yellow, red. + :arg help: Return help information. Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory. (default: false) + memory. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg pri: Set to true to return stats only for primary shards. - (default: false) + Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers @@ -241,15 +241,15 @@ def master(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. 
Default is false. """ from warnings import warn @@ -280,15 +280,15 @@ def cluster_manager(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/cluster_manager", params=params, headers=headers @@ -312,27 +312,27 @@ def nodes(self, params=None, headers=None): Returns basic statistics about performance of cluster nodes. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg full_id: Return the full node ID instead of the shortened - version. (default: false) + version. Default is false. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local (Deprecated: This parameter does not cause this API - to act locally): Return local information, do not retrieve the state - from cluster-manager node. (default: false) + to act locally.): Return local information, do not retrieve the state + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers @@ -349,20 +349,20 @@ def recovery(self, index=None, params=None, headers=None): :arg index: Comma-separated list or wildcard expression of index names to limit the returned information. :arg active_only: If `true`, the response only includes ongoing - shard recoveries. (default: false) - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + shard recoveries. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. 
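For the node-oriented cat endpoints above, a short sketch; the column names passed to `h` are common `_cat/nodes` columns and an assumption here:

```python
# Per-node basics with explicit columns and full node ids.
print(client.cat.nodes(v=True, h="name,heap.percent,cpu", full_id=True))

# Preferred over the deprecated `cat.master` equivalent.
print(client.cat.cluster_manager(v=True))
```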
:arg detailed: If `true`, the response includes detailed - information about shard recoveries. (default: false) + information about shard recoveries. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "recovery", index), params=params, headers=headers @@ -387,24 +387,24 @@ def shards(self, index=None, params=None, headers=None): :arg index: Comma-separated list of indices to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "shards", index), params=params, headers=headers @@ -427,20 +427,20 @@ def segments(self, index=None, params=None, headers=None): :arg index: Comma-separated list of indices to limit the returned information. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
:arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "segments", index), params=params, headers=headers @@ -467,17 +467,17 @@ def pending_tasks(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/pending_tasks", params=params, headers=headers @@ -507,16 +507,16 @@ def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. :arg size: The multiplier in which to display values. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", @@ -534,15 +534,15 @@ def fielddata(self, fields=None, params=None, headers=None): :arg fields: Comma-separated list of fields to return in the output. - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. 
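A sketch for the queue- and memory-oriented cat endpoints documented above (the thread pool patterns and field name are illustrative):

```python
print(client.cat.pending_tasks(v=True, time="ms"))
print(client.cat.thread_pool("search,write", v=True))    # filter by thread pool patterns
print(client.cat.fielddata(fields="title", bytes="kb"))  # fielddata usage in kilobytes
```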
""" return self.transport.perform_request( "GET", @@ -571,15 +571,15 @@ def plugins(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/plugins", params=params, headers=headers @@ -605,15 +605,15 @@ def nodeattrs(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/nodeattrs", params=params, headers=headers @@ -639,15 +639,15 @@ def repositories(self, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/repositories", params=params, headers=headers @@ -675,17 +675,18 @@ def snapshots(self, repository=None, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). 
(default: false) + should be ignored when unavailable (missing or closed). Default is + false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", @@ -714,12 +715,12 @@ def tasks(self, params=None, headers=None): :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information. (default: - false) + :arg detailed: Return detailed task information. Default is + false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all @@ -728,9 +729,9 @@ def tasks(self, params=None, headers=None): (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers @@ -757,15 +758,15 @@ def templates(self, name=None, params=None, headers=None): :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg v: Verbose mode. Display column headers. (default: false) + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers @@ -787,7 +788,6 @@ def pit_segments(self, body=None, params=None, headers=None): List segments for one or several PITs. 
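Sketching the remaining cat endpoints from this hunk; the repository name, action pattern, and template pattern are hypothetical values:

```python
print(client.cat.snapshots(repository="backup-repo", v=True, time="s"))
print(client.cat.tasks(detailed=True, actions="*search*"))
print(client.cat.templates(name="logs-*", v=True))
```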
- :arg body: """ return self.transport.perform_request( "GET", "/_cat/pit_segments", params=params, headers=headers, body=body @@ -815,23 +815,23 @@ def segment_replication(self, index=None, params=None, headers=None): :arg index: Comma-separated list or wildcard expression of index names to limit the returned information. :arg active_only: If `true`, the response only includes ongoing - segment replication events. (default: false) - :arg bytes: The unit in which to display byte values. Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + segment replication events. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg completed_only: If `true`, the response only includes - latest completed segment replication events. (default: false) + latest completed segment replication events. Default is false. :arg detailed: If `true`, the response includes detailed - information about segment replications. (default: false) + information about segment replications. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. - :arg help: Return help information. (default: false) + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. :arg shards: Comma-separated list of shards to display. - :arg time: The unit in which to display time values. Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers. (default: false) + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index 28f1f0e8..f2276261 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -65,22 +65,22 @@ def health(self, index=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg level: Specify the level of detail for returned - information. Valid choices: cluster, indices, shards, - awareness_attributes + information. Valid choices are cluster, indices, shards, + awareness_attributes. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. :arg wait_for_active_shards: Wait until the specified number of shards is active. :arg wait_for_events: Wait until all currently queued events - with the given priority are processed. Valid choices: immediate, - urgent, high, normal, low, languid + with the given priority are processed. Valid choices are immediate, + urgent, high, normal, low, languid. 
:arg wait_for_no_initializing_shards: Whether to wait until there are no initializing shards in the cluster. :arg wait_for_no_relocating_shards: Whether to wait until there @@ -88,7 +88,7 @@ def health(self, index=None, params=None, headers=None): :arg wait_for_nodes: Wait until the specified number of nodes is available. :arg wait_for_status: Wait until cluster is in a specific state. - Valid choices: green, yellow, red + Valid choices are green, yellow, red. """ return self.transport.perform_request( "GET", @@ -107,10 +107,10 @@ def pending_tasks(self, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", "/_cluster/pending_tasks", params=params, headers=headers @@ -133,8 +133,8 @@ def state(self, metric=None, index=None, params=None, headers=None): :arg metric: Limit the information returned to the specified - metrics. Valid choices: _all, blocks, metadata, nodes, routing_table, - routing_nodes, master_node, cluster_manager_node, version + metrics. Valid choices are _all, blocks, metadata, nodes, routing_table, + routing_nodes, master_node, cluster_manager_node, version. :arg index: Comma-separated list of indices; use `_all` or empty string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices @@ -143,17 +143,17 @@ def state(self, metric=None, index=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: all, - open, closed, hidden, none - :arg flat_settings: Return settings in flat format. (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_metadata_version: Wait for the metadata version to be equal or greater than the specified metadata version. :arg wait_for_timeout: The maximum time to wait for @@ -179,8 +179,8 @@ def stats(self, node_id=None, params=None, headers=None): the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes. - :arg flat_settings: Return settings in flat format. (default: - false) + :arg flat_settings: Return settings in flat format. Default is + false. :arg timeout: Operation timeout. 
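The cluster health and state parameters above compose naturally; a minimal sketch, assuming an existing `movies` index:

```python
# Block up to 30s for at least yellow health, reported per index.
health = client.cluster.health(wait_for_status="yellow", timeout="30s", level="indices")
print(health["status"])

# Restrict the cluster state response to routing information for one index.
state = client.cluster.state(metric="routing_table", index="movies")
```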
""" return self.transport.perform_request( @@ -215,8 +215,8 @@ def reroute(self, body=None, params=None, headers=None): :arg explain: Return an explanation of why the commands can or cannot be executed. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg metric: Limit the information returned to the specified metrics. Defaults to all but metadata. :arg retry_failed: Retries allocation of shards that are blocked @@ -241,13 +241,13 @@ def get_settings(self, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. - :arg flat_settings: Return settings in flat format. (default: - false) + :arg flat_settings: Return settings in flat format. Default is + false. :arg include_defaults: Whether to return all default clusters - setting. (default: false) + setting. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ return self.transport.perform_request( @@ -266,11 +266,11 @@ def put_settings(self, body, params=None, headers=None): or `persistent` (survives cluster restart). :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. - :arg flat_settings: Return settings in flat format. (default: - false) + :arg flat_settings: Return settings in flat format. Default is + false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: @@ -299,9 +299,9 @@ def allocation_explain(self, body=None, params=None, headers=None): :arg body: The index, shard, and primary flag to explain. Empty means 'explain the first unassigned shard' :arg include_disk_info: Return information about disk usage and - shard sizes. (default: false) + shard sizes. Default is false. :arg include_yes_decisions: Return 'YES' decisions in - explanation. (default: false) + explanation. Default is false. """ return self.transport.perform_request( "POST", @@ -321,8 +321,8 @@ def delete_component_template(self, name, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: @@ -345,10 +345,10 @@ def get_component_template(self, name=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. 
+ use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", @@ -368,10 +368,10 @@ def put_component_template(self, name, body, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one. (default: false) + new or can also replace an existing one. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ for param in (name, body): @@ -386,18 +386,20 @@ def put_component_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("local", "master_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") def exists_component_template(self, name, params=None, headers=None): """ Returns information about whether a particular component template exist. :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster-manager node. (default: false) + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -417,7 +419,7 @@ def delete_voting_config_exclusions(self, params=None, headers=None): :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting - configuration exclusions list. (default: True) + configuration exclusions list. Default is True. """ return self.transport.perform_request( "DELETE", diff --git a/opensearchpy/client/cluster.pyi b/opensearchpy/client/cluster.pyi index ccc3737a..7ea5016f 100644 --- a/opensearchpy/client/cluster.pyi +++ b/opensearchpy/client/cluster.pyi @@ -300,6 +300,7 @@ class ClusterClient(NamespacedClient): self, name: Any, *, + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index b04698ad..7cb3ea34 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -53,8 +53,8 @@ def delete_dangling_index(self, index_uuid, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: @@ -81,8 +81,8 @@ def import_dangling_index(self, index_uuid, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. 
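For the component-template calls documented above, a sketch of a create/check/fetch cycle; the template name and body are illustrative, not taken from this patch:

```python
client.cluster.put_component_template(
    name="common-settings",
    body={"template": {"settings": {"index": {"number_of_shards": 1}}}},
    create=True,  # fail instead of silently replacing an existing template
)
if client.cluster.exists_component_template(name="common-settings"):
    print(client.cluster.get_component_template(name="common-settings"))
```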
:arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index 138692e1..9db06dd6 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -25,6 +25,16 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -38,7 +48,7 @@ def analyze(self, body=None, index=None, params=None, headers=None): :arg body: Define analyzer/tokenizer parameters and the text on which the analysis should be performed - :arg index: The name of the index to scope the operation + :arg index: The name of the index to scope the operation. """ return self.transport.perform_request( "POST", @@ -54,16 +64,16 @@ def refresh(self, index=None, params=None, headers=None): Performs the refresh operation in one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). """ return self.transport.perform_request( "POST", _make_path(index, "_refresh"), params=params, headers=headers @@ -81,44 +91,47 @@ def flush(self, index=None, params=None, headers=None): Performs the flush operation on one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string for all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
:arg force: Whether a flush should be forced even if it is not
            necessarily needed ie. if no changes will be committed to the index.
            This is useful if transaction log IDs should be incremented even if no
            uncommitted changes are present. (This setting can be considered as
-            internal)
+            internal).
        :arg ignore_unavailable: Whether specified concrete indices
-            should be ignored when unavailable (missing or closed)
+            should be ignored when unavailable (missing or closed).
        :arg wait_if_ongoing: If set to true the flush operation will
            block until the flush can be executed if another flush operation is
-            already executing. The default is true. If set to false the flush will
-            be skipped iff if another flush operation is already running.
+            already executing. If set to false the flush will be skipped if
+            another flush operation is already running. Default is True.
        """
        return self.transport.perform_request(
            "POST", _make_path(index, "_flush"), params=params, headers=headers
        )

    @query_params(
-        "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards"
+        "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards"
    )
    def create(self, index, body=None, params=None, headers=None):
        """
        Creates an index with optional settings and mappings.


-        :arg index: The name of the index
+        :arg index: Index name.
        :arg body: The configuration for the index (`settings` and
            `mappings`)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
        :arg wait_for_active_shards: Set the number of active shards to
            wait for before the operation returns.
        """
@@ -130,20 +143,23 @@ def create(self, index, body=None, params=None, headers=None):
        )

    @query_params(
-        "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards"
+        "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards"
    )
    def clone(self, index, target, body=None, params=None, headers=None):
        """
-        Clones an index
+        Clones an index.


-        :arg index: The name of the source index to clone
-        :arg target: The name of the target index to clone into
+        :arg index: The name of the source index to clone.
+        :arg target: The name of the target index.
        :arg body: The configuration for the target index (`settings`
            and `aliases`)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
        :arg wait_for_active_shards: Set the number of active shards to
            wait for on the cloned index before the operation returns.
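        Example (an illustrative sketch, not generated documentation;
        it assumes a connected `client` instance, the index names are
        hypothetical, and the source index must be made read-only
        before it can be cloned)::

            client.indices.put_settings(
                body={"index.blocks.write": True}, index="logs-2023"
            )
            client.indices.clone(index="logs-2023", target="logs-2023-copy")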
""" @@ -161,35 +177,40 @@ def clone(self, index, target, body=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", - "cluster_manager_timeout", ) def get(self, index, params=None, headers=None): """ Returns information about one or more indices. - :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -200,10 +221,10 @@ def get(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -212,18 +233,21 @@ def open(self, index, params=None, headers=None): Opens an index. - :arg index: A comma separated list of indices to open + :arg index: Comma-separated list of indices to open. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
:arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ @@ -236,10 +260,10 @@ def open(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -248,22 +272,23 @@ def close(self, index, params=None, headers=None): Closes an index. - :arg index: A comma separated list of indices to close + :arg index: Comma-separated list of indices to close. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. Set to `index-setting` to wait - according to the index setting `index.write.wait_for_active_shards`, or - `all` to wait for all shards, or an integer. Defaults to `0`. + wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -274,10 +299,10 @@ def close(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) def delete(self, index, params=None, headers=None): @@ -285,18 +310,23 @@ def delete(self, index, params=None, headers=None): Deletes an index. 
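        Example (an illustrative sketch with a hypothetical index name;
        passing `ignore_unavailable` avoids an error when the index is
        already gone)::

            client.indices.delete(index="movies", ignore_unavailable=True)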
- :arg index: A comma-separated list of indices to delete; use - `_all` or `*` string to delete all indices - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg index: Comma-separated list of indices to delete; use + `_all` or `*` string to delete all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -318,20 +348,22 @@ def exists(self, index, params=None, headers=None): Returns information about whether a particular index exists. - :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. 
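        Example (an illustrative sketch with a hypothetical index name;
        the call returns a boolean, so it can guard index creation)::

            if not client.indices.exists(index="movies"):
                client.indices.create(index="movies")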
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -342,10 +374,10 @@ def exists(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "write_index_only", ) @@ -355,22 +387,24 @@ def put_mapping(self, body, index=None, params=None, headers=None): :arg body: The mapping definition - :arg index: A comma-separated list of index names the mapping - should be added to (supports wildcards); use `_all` or omit to add the - mapping on all indices. + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg write_index_only: When true, applies mappings only to the - write index of an alias or data stream + write index of an alias or data stream. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -388,36 +422,37 @@ def put_mapping(self, body, index=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "local", "master_timeout", - "cluster_manager_timeout", ) def get_mapping(self, index=None, params=None, headers=None): """ Returns mappings for one or more indices. - :arg index: A comma-separated list of index names + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
:arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg local (Deprecated: This parameter is a no-op and field + mappings are always retrieved locally.): Return local information, do + not retrieve the state from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( - "GET", - _make_path(index, "_mapping"), - params=params, - headers=headers, + "GET", _make_path(index, "_mapping"), params=params, headers=headers ) @query_params( @@ -432,20 +467,20 @@ def get_field_mapping(self, fields, index=None, params=None, headers=None): Returns mapping for one or more fields. - :arg fields: A comma-separated list of fields - :arg index: A comma-separated list of index names + :arg fields: Comma-separated list of fields. + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether the default mapping values should - be returned as well + be returned as well. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if fields in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'fields'.") @@ -457,21 +492,23 @@ def get_field_mapping(self, fields, index=None, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def put_alias(self, index, name, body=None, params=None, headers=None): """ Creates or updates an alias. - :arg index: A comma-separated list of index names the alias - should point to (supports wildcards); use `_all` to perform the - operation on all indices. - :arg name: The name of the alias to be created or updated + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: The name of the alias to be created or updated. 
:arg body: The settings for the alias, such as `routing` or `filter` - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -491,19 +528,18 @@ def exists_alias(self, name, index=None, params=None, headers=None): Returns information about whether a particular alias exists. - :arg name: A comma-separated list of alias names to return - :arg index: A comma-separated list of index names to filter - aliases + :arg name: Comma-separated list of alias names. + :arg index: Comma-separated list of indices to filter aliases. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -518,34 +554,36 @@ def get_alias(self, index=None, name=None, params=None, headers=None): Returns an alias. - :arg index: A comma-separated list of index names to filter - aliases - :arg name: A comma-separated list of alias names to return + :arg index: Comma-separated list of indices to filter aliases. + :arg name: Comma-separated list of alias names. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ return self.transport.perform_request( "GET", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def update_aliases(self, body, params=None, headers=None): """ Updates index aliases. 
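        Example (an illustrative sketch with hypothetical index names;
        the `actions` are applied atomically, which makes this suitable
        for switching an alias between index versions)::

            client.indices.update_aliases(
                body={
                    "actions": [
                        {"remove": {"index": "movies-v1", "alias": "movies"}},
                        {"add": {"index": "movies-v2", "alias": "movies"}},
                    ]
                }
            )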
:arg body: The definition of `actions` to perform - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Request timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -554,19 +592,22 @@ def update_aliases(self, body, params=None, headers=None): "POST", "/_aliases", params=params, headers=headers, body=body ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def delete_alias(self, index, name, params=None, headers=None): """ Deletes an alias. - :arg index: A comma-separated list of index names (supports - wildcards); use `_all` for all indices - :arg name: A comma-separated list of aliases to delete (supports + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of aliases to delete (supports wildcards); use `_all` to delete all aliases for the specified indices. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -576,21 +617,24 @@ def delete_alias(self, index, name, params=None, headers=None): "DELETE", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("create", "master_timeout", "cluster_manager_timeout", "order") + @query_params("cluster_manager_timeout", "create", "master_timeout", "order") def put_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg order: The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower - numbers) + numbers). 
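        Example (an illustrative sketch; the template name and pattern
        are hypothetical, and `order` only matters when several
        templates match the same index)::

            client.indices.put_template(
                name="default-logs",
                body={
                    "index_patterns": ["logs-*"],
                    "order": 0,
                    "settings": {"number_of_shards": 1},
                },
            )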
""" for param in (name, body): if param in SKIP_IN_PATH: @@ -604,21 +648,22 @@ def put_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def exists_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -627,36 +672,40 @@ def exists_template(self, name, params=None, headers=None): "HEAD", _make_path("_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def get_template(self, name=None, params=None, headers=None): """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", _make_path("_template", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def delete_template(self, name, params=None, headers=None): """ Deletes an index template. - :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -667,38 +716,41 @@ def delete_template(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", - "cluster_manager_timeout", ) def get_settings(self, index=None, name=None, params=None, headers=None): """ Returns settings for one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg name: The name of the settings that should be included + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of settings. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", _make_path(index, "_settings", name), params=params, headers=headers @@ -706,11 +758,11 @@ def get_settings(self, index=None, name=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "preserve_existing", "timeout", ) @@ -720,24 +772,27 @@ def put_settings(self, body, index=None, params=None, headers=None): :arg body: The index settings to be updated - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. 
(This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg preserve_existing: Whether to update existing settings. If - set to `true` existing settings on an index remain unchanged, the - default is `false` - :arg timeout: Explicit operation timeout + set to `true` existing settings on an index remain unchanged. Default is + false. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -760,43 +815,40 @@ def put_settings(self, body, index=None, params=None, headers=None): "include_segment_file_sizes", "include_unloaded_segments", "level", - "types", ) def stats(self, index=None, metric=None, params=None, headers=None): """ Provides statistics on operations happening in an index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg metric: Limit the information returned the specific - metrics. Valid choices: _all, completion, docs, fielddata, query_cache, - flush, get, indexing, merge, request_cache, refresh, search, segments, - store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) + metrics. Valid choices are _all, store, indexing, get, search, merge, + flush, refresh, query_cache, fielddata, docs, warmer, completion, + segments, translog, suggest, request_cache, recovery. + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). 
:arg forbid_closed_indices: If set to false stats will also collected from closed indices if explicitly specified or if - expand_wildcards expands to closed indices Default: True - :arg groups: A comma-separated list of search groups for - `search` index metric + expand_wildcards expands to closed indices. Default is True. + :arg groups: Comma-separated list of search groups for `search` + index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) + applies if segment stats are requested). Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. Default is false. :arg level: Return stats aggregated at cluster, index or shard - level Valid choices: cluster, indices, shards Default: indices - :arg types: A comma-separated list of document types for the - `indexing` index metric + level. Valid choices are cluster, indices, shards. """ return self.transport.perform_request( "GET", _make_path(index, "_stats", metric), params=params, headers=headers @@ -810,17 +862,18 @@ def segments(self, index=None, params=None, headers=None): Provides low-level information about segments in a Lucene index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg verbose: Includes detailed memory usage by Lucene. + should be ignored when unavailable (missing or closed). + :arg verbose: Includes detailed memory usage by Lucene. Default + is false. """ return self.transport.perform_request( "GET", _make_path(index, "_segments"), params=params, headers=headers @@ -846,32 +899,29 @@ def validate_query(self, body=None, index=None, params=None, headers=None): :arg body: The query definition specified with the Query DSL - :arg index: A comma-separated list of index names to restrict - the operation; use `_all` or empty string to perform the operation on - all indices - restrict the operation; leave empty to perform the operation on all - types + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg all_shards: Execute validation on all shards instead of one - random shard per index + random shard per index. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. 
Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg explain: Return detailed information about the error + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg explain: Return detailed information about the error. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored - :arg q: Query in the Lucene query string syntax + as providing text to a numeric field) should be ignored. + :arg q: Query in the Lucene query string syntax. :arg rewrite: Provide a more detailed explanation showing the actual Lucene query that will be executed. """ @@ -897,21 +947,21 @@ def clear_cache(self, index=None, params=None, headers=None): Clears all or specific caches for one or more indices. - :arg index: A comma-separated list of index name to limit the - operation + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata: Clear field data - :arg fields: A comma-separated list of fields to clear when - using the `fielddata` parameter (default: all) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fielddata: Clear field data. + :arg fields: Comma-separated list of fields to clear when using + the `fielddata` parameter (default: all). :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg query: Clear query caches - :arg request: Clear request cache + should be ignored when unavailable (missing or closed). + :arg query: Clear query caches. + :arg request: Clear request cache. """ return self.transport.perform_request( "POST", _make_path(index, "_cache", "clear"), params=params, headers=headers @@ -923,12 +973,12 @@ def recovery(self, index=None, params=None, headers=None): Returns information about ongoing index shard recoveries. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg active_only: Display only those recoveries that are - currently on-going + currently on-going. Default is false. :arg detailed: Whether to display detailed information about - shard recovery + shard recovery. Default is false. 
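        Example (an illustrative sketch with a hypothetical index name;
        the response maps each index to its recovering shards)::

            response = client.indices.recovery(index="movies", active_only=True)
            for index_name, info in response.items():
                for shard in info.get("shards", []):
                    print(index_name, shard["id"], shard["stage"])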
""" return self.transport.perform_request( "GET", _make_path(index, "_recovery"), params=params, headers=headers @@ -943,23 +993,23 @@ def recovery(self, index=None, params=None, headers=None): ) def upgrade(self, index=None, params=None, headers=None): """ - DEPRECATED Upgrades to the current version of Lucene. + The _upgrade API is no longer useful and will be removed. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg only_ancient_segments: If true, only ancient (an older - Lucene major release) segments will be upgraded - :arg wait_for_completion: Specify whether the request should - block until the all segments are upgraded (default: false) + Lucene major release) segments will be upgraded. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ return self.transport.perform_request( "POST", _make_path(index, "_upgrade"), params=params, headers=headers @@ -968,49 +1018,24 @@ def upgrade(self, index=None, params=None, headers=None): @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") def get_upgrade(self, index=None, params=None, headers=None): """ - DEPRECATED Returns a progress status of current upgrade. + The _upgrade API is no longer useful and will be removed. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). """ return self.transport.perform_request( "GET", _make_path(index, "_upgrade"), params=params, headers=headers ) - @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def flush_synced(self, index=None, params=None, headers=None): - """ - Performs a synced flush operation on one or more indices. Synced flush is - deprecated. 
Use flush instead - - - :arg index: A comma-separated list of index names; use `_all` or - empty string for all indices - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - return self.transport.perform_request( - "POST", - _make_path(index, "_flush", "synced"), - params=params, - headers=headers, - ) - @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status" ) @@ -1019,19 +1044,18 @@ def shard_stores(self, index=None, params=None, headers=None): Provides store information for shard copies of indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg status: A comma-separated list of statuses used to filter - on shards to get store information for Valid choices: green, yellow, - red, all + should be ignored when unavailable (missing or closed). + :arg status: Comma-separated list of statuses used to filter on + shards to get store information for. """ return self.transport.perform_request( "GET", _make_path(index, "_shard_stores"), params=params, headers=headers @@ -1050,31 +1074,31 @@ def forcemerge(self, index=None, params=None, headers=None): Performs the force merge operation on one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg flush: Specify whether the index should be flushed after - performing the operation (default: true) + performing the operation. Default is True. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). 
:arg max_num_segments: The number of segments the index should - be merged into (default: dynamic) + be merged into (default: dynamic). :arg only_expunge_deletes: Specify whether the operation should - only expunge deleted documents + only expunge deleted documents. """ return self.transport.perform_request( "POST", _make_path(index, "_forcemerge"), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -1083,15 +1107,18 @@ def shrink(self, index, target, body=None, params=None, headers=None): Allow to shrink an existing index into a new index with fewer primary shards. - :arg index: The name of the source index to shrink - :arg target: The name of the target index to shrink into + :arg index: The name of the source index to shrink. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ @@ -1108,9 +1135,9 @@ def shrink(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -1120,15 +1147,18 @@ def split(self, index, target, body=None, params=None, headers=None): shards. - :arg index: The name of the source index to split - :arg target: The name of the target index to split into + :arg index: The name of the source index to split. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ @@ -1145,9 +1175,9 @@ def split(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "dry_run", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) @@ -1157,16 +1187,19 @@ def rollover(self, alias, body=None, new_index=None, params=None, headers=None): to be too large or too old. 
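        Example (an illustrative sketch; the alias and conditions are
        hypothetical, and the rollover is performed only when at least
        one condition is met)::

            client.indices.rollover(
                alias="logs-write",
                body={"conditions": {"max_age": "7d", "max_docs": 10000000}},
            )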
- :arg alias: The name of the alias to rollover + :arg alias: The name of the alias to rollover. :arg body: The conditions that needs to be met for executing rollover - :arg new_index: The name of the rollover index + :arg new_index: The name of the rollover index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg dry_run: If set to true the rollover action will only be - validated but not actually performed even if a condition matches. The - default is false - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + validated but not actually performed even if a condition matches. + Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the newly created rollover index before the operation returns. @@ -1182,133 +1215,34 @@ def rollover(self, alias, body=None, new_index=None, params=None, headers=None): body=body, ) - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - def freeze(self, index, params=None, headers=None): - """ - Freezes an index. A frozen index has almost no overhead on the cluster (except - for maintaining its metadata in memory) and is read-only. - - - :arg index: The name of the index to freeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "POST", _make_path(index, "_freeze"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - def unfreeze(self, index, params=None, headers=None): - """ - Unfreezes an index. When a frozen index is unfrozen, the index goes through the - normal recovery process and becomes writeable again. - - - :arg index: The name of the index to unfreeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. 
Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "POST", _make_path(index, "_unfreeze"), params=params, headers=headers - ) - - @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def reload_search_analyzers(self, index, params=None, headers=None): - """ - Reloads an index's search analyzers and their resources. - - - :arg index: A comma-separated list of index names to reload - analyzers for - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "GET", - _make_path(index, "_reload_search_analyzers"), - params=params, - headers=headers, - ) - @query_params() - def create_data_stream(self, name, params=None, headers=None): + def create_data_stream(self, name, body=None, params=None, headers=None): """ - Creates a data stream + Creates or updates a data stream. - :arg name: The name of the data stream + :arg name: The name of the data stream. + :arg body: The data stream definition """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( - "PUT", _make_path("_data_stream", name), params=params, headers=headers + "PUT", + _make_path("_data_stream", name), + params=params, + headers=headers, + body=body, ) - @query_params("expand_wildcards") + @query_params() def delete_data_stream(self, name, params=None, headers=None): """ Deletes a data stream. - :arg name: A comma-separated list of data streams to delete; use - `*` to delete all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1317,16 +1251,19 @@ def delete_data_stream(self, name, params=None, headers=None): "DELETE", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def delete_index_template(self, name, params=None, headers=None): """ Deletes an index template. 
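# Sketch of the data stream lifecycle touched by this hunk. Data streams
# require a matching index template that declares a `data_stream` section;
# the template and stream names below are illustrative only.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

client.indices.put_index_template(
    name="logs-template",
    body={"index_patterns": ["logs-app*"], "data_stream": {}},
)
client.indices.create_data_stream(name="logs-app")  # body is now optional
client.indices.delete_data_stream(name="logs-app")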
- :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1338,21 +1275,22 @@ def delete_index_template(self, name, params=None, headers=None): headers=headers, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def exists_index_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. - :arg name: The name of the template - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1361,40 +1299,44 @@ def exists_index_template(self, name, params=None, headers=None): "HEAD", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") def get_index_template(self, name=None, params=None, headers=None): """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
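# Illustrative check-then-fetch flow for the index template helpers above;
# the template name is a placeholder.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

if client.indices.exists_index_template(name="logs-template"):
    response = client.indices.get_index_template(name="logs-template", flat_settings=True)
    print(response["index_templates"][0]["name"])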
""" return self.transport.perform_request( "GET", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") def put_index_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition :arg cause: User defined reason for creating/updating the index - template + template. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (name, body): if param in SKIP_IN_PATH: @@ -1408,24 +1350,27 @@ def put_index_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") def simulate_index_template(self, name, body=None, params=None, headers=None): """ Simulate matching the given index name against the index templates in the - system + system. :arg name: The name of the index (it must be a concrete index - name) + name). :arg body: New index template definition, which will be included in the simulation, as if it already exists in the system :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1438,38 +1383,38 @@ def simulate_index_template(self, name, body=None, params=None, headers=None): body=body, ) - @query_params("expand_wildcards") + @query_params() def get_data_stream(self, name=None, params=None, headers=None): """ Returns data streams. - :arg name: A comma-separated list of data streams to get; use - `*` to get all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. 
""" return self.transport.perform_request( "GET", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") def simulate_template(self, body=None, name=None, params=None, headers=None): """ - Simulate resolving the given template name or body + Simulate resolving the given template name or body. :arg body: New index template definition to be simulated, if no index template name is specified - :arg name: The name of the index template + :arg name: The name of the template. :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "POST", @@ -1482,19 +1427,14 @@ def simulate_template(self, body=None, name=None, params=None, headers=None): @query_params("expand_wildcards") def resolve_index(self, name, params=None, headers=None): """ - Returns information about any matching indices, aliases, and data streams - - - .. warning:: + Returns information about any matching indices, aliases, and data streams. - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg name: A comma-separated list of names or wildcard - expressions - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of names or wildcard + expressions. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1505,10 +1445,10 @@ def resolve_index(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) def add_block(self, index, block, params=None, headers=None): @@ -1516,20 +1456,23 @@ def add_block(self, index, block, params=None, headers=None): Adds a block to an index. - :arg index: A comma separated list of indices to add a block to + :arg index: Comma-separated list of indices to add a block to. :arg block: The block to add (one of read, write, read_only or - metadata) + metadata). :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
:arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, block): if param in SKIP_IN_PATH: @@ -1545,8 +1488,8 @@ def data_streams_stats(self, name=None, params=None, headers=None): Provides statistics on operations happening in a data stream. - :arg name: A comma-separated list of data stream names; use - `_all` or empty string to perform the operation on all data streams + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. """ return self.transport.perform_request( "GET", @@ -1554,115 +1497,3 @@ def data_streams_stats(self, name=None, params=None, headers=None): params=params, headers=headers, ) - - @query_params() - def promote_data_stream(self, name, params=None, headers=None): - """ - Promotes a data stream from a replicated data stream managed by CCR to a - regular data stream - - - :arg name: The name of the data stream - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return self.transport.perform_request( - "POST", - _make_path("_data_stream", "_promote", name), - params=params, - headers=headers, - ) - - @query_params() - def migrate_to_data_stream(self, name, params=None, headers=None): - """ - Migrates an alias to a data stream - - - :arg name: The name of the alias to migrate - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return self.transport.perform_request( - "POST", - _make_path("_data_stream", "_migrate", name), - params=params, - headers=headers, - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "flush", - "ignore_unavailable", - "run_expensive_tasks", - ) - def disk_usage(self, index, params=None, headers=None): - """ - Analyzes the disk usage of each field of an index or data stream - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: Comma-separated list of indices or data streams to - analyze the disk usage - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flush: Whether flush or not before analyzing the index disk - usage. 
Defaults to true - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg run_expensive_tasks: Must be set to [true] in order for the - task to be performed. Defaults to false. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "POST", _make_path(index, "_disk_usage"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", "expand_wildcards", "fields", "ignore_unavailable" - ) - def field_usage_stats(self, index, params=None, headers=None): - """ - Returns the field usage stats for each field of an index - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of fields to include in the - stats if only a subset of fields should be returned (supports wildcards) - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "GET", - _make_path(index, "_field_usage_stats"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/client/indices.pyi b/opensearchpy/client/indices.pyi index 2393537a..51751d53 100644 --- a/opensearchpy/client/indices.pyi +++ b/opensearchpy/client/indices.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -94,8 +103,8 @@ class IndicesClient(NamespacedClient): index: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -117,8 +126,8 @@ class IndicesClient(NamespacedClient): target: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -139,13 +148,13 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., include_defaults: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -164,10 +173,10 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -188,10 +197,10 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -212,10 +221,10 @@ class IndicesClient(NamespacedClient): index: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -259,10 +268,10 @@ class IndicesClient(NamespacedClient): body: Any, index: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., write_index_only: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -283,11 +292,11 @@ class IndicesClient(NamespacedClient): *, index: Optional[Any] = ..., allow_no_indices: 
Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -330,8 +339,8 @@ class IndicesClient(NamespacedClient): name: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -394,8 +403,8 @@ class IndicesClient(NamespacedClient): self, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -415,8 +424,8 @@ class IndicesClient(NamespacedClient): index: Any, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -436,9 +445,9 @@ class IndicesClient(NamespacedClient): name: Any, *, body: Any, + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., order: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -457,10 +466,10 @@ class IndicesClient(NamespacedClient): self, name: Any, *, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -478,10 +487,10 @@ class IndicesClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -499,8 +508,8 @@ class IndicesClient(NamespacedClient): self, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -521,13 +530,13 @@ class IndicesClient(NamespacedClient): index: Optional[Any] = ..., name: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., include_defaults: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -547,11 +556,11 @@ class IndicesClient(NamespacedClient): body: Any, index: Optional[Any] = ..., allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - 
cluster_manager_timeout: Optional[Any] = ..., preserve_existing: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -581,7 +590,6 @@ class IndicesClient(NamespacedClient): include_segment_file_sizes: Optional[Any] = ..., include_unloaded_segments: Optional[Any] = ..., level: Optional[Any] = ..., - types: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -731,26 +739,6 @@ class IndicesClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def flush_synced( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... def shard_stores( self, *, @@ -801,9 +789,9 @@ class IndicesClient(NamespacedClient): target: Any, *, body: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., copy_settings: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -825,9 +813,9 @@ class IndicesClient(NamespacedClient): target: Any, *, body: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., copy_settings: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -849,57 +837,9 @@ class IndicesClient(NamespacedClient): *, body: Optional[Any] = ..., new_index: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
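# Usage sketch matching the rollover signature stubbed above; the alias and
# conditions are placeholders.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

response = client.indices.rollover(
    alias="logs-write",
    body={"conditions": {"max_age": "7d", "max_docs": 1_000_000}},
    dry_run=True,  # validate the conditions without actually rolling over
)
print(response["conditions"])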
- def freeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def unfreeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., + dry_run: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -915,30 +855,11 @@ class IndicesClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def reload_search_analyzers( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
def create_data_stream( self, name: Any, *, + body: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -956,7 +877,6 @@ class IndicesClient(NamespacedClient): self, name: Any, *, - expand_wildcards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -974,8 +894,8 @@ class IndicesClient(NamespacedClient): self, name: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -994,10 +914,10 @@ class IndicesClient(NamespacedClient): self, name: Any, *, + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1015,10 +935,10 @@ class IndicesClient(NamespacedClient): self, *, name: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., flat_settings: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1038,9 +958,9 @@ class IndicesClient(NamespacedClient): *, body: Any, cause: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1060,9 +980,9 @@ class IndicesClient(NamespacedClient): *, body: Optional[Any] = ..., cause: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1080,7 +1000,6 @@ class IndicesClient(NamespacedClient): self, *, name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1100,9 +1019,9 @@ class IndicesClient(NamespacedClient): body: Optional[Any] = ..., name: Optional[Any] = ..., cause: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., create: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -1140,10 +1059,10 @@ class IndicesClient(NamespacedClient): block: Any, *, allow_no_indices: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -1175,80 +1094,3 @@ class IndicesClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
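# Minimal sketch for the data stream read/stats stubs above; the stream
# name is illustrative.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

print(client.indices.get_data_stream(name="logs-app"))
print(client.indices.data_streams_stats(name="logs-app"))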
- def promote_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def migrate_to_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def disk_usage( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - run_expensive_tasks: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def field_usage_stats( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index fb9d4f79..a14bc0f6 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -50,8 +50,8 @@ def get_pipeline(self, id=None, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
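# Sketch of fetching an ingest pipeline with the renamed timeout parameter
# documented above; the pipeline id is illustrative.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

print(client.ingest.get_pipeline(id="my-pipeline", cluster_manager_timeout="30s"))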
""" return self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers @@ -68,8 +68,8 @@ def put_pipeline(self, id, body, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ for param in (id, body): @@ -94,8 +94,8 @@ def delete_pipeline(self, id, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead): Operation timeout for connection - to master node. + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: @@ -117,7 +117,7 @@ def simulate(self, body, id=None, params=None, headers=None): :arg body: The simulate definition :arg id: Pipeline ID. :arg verbose: Verbose mode. Display data output for each - processor in executed pipeline. + processor in executed pipeline. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 9dc1e1ab..6b73177b 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -73,10 +73,10 @@ def info(self, node_id=None, metric=None, params=None, headers=None): node you're connecting to, leave empty to get information from all nodes. :arg metric: Comma-separated list of metrics you wish returned. - Leave empty to return all. Valid choices: settings, os, process, jvm, - thread_pool, transport, http, plugins, ingest - :arg flat_settings: Return settings in flat format. (default: - false) + Leave empty to return all. Valid choices are settings, os, process, jvm, + thread_pool, transport, http, plugins, ingest. + :arg flat_settings: Return settings in flat format. Default is + false. :arg timeout: Operation timeout. """ return self.transport.perform_request( @@ -105,13 +105,13 @@ def stats( node you're connecting to, leave empty to get information from all nodes. :arg metric: Limit the information returned to the specified - metrics. Valid choices: _all, breaker, fs, http, indices, jvm, os, - process, thread_pool, transport, discovery, indexing_pressure + metrics. Valid choices are _all, breaker, fs, http, indices, jvm, os, + process, thread_pool, transport, discovery, indexing_pressure. :arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) - metric isn't specified. Valid choices: _all, store, indexing, get, + metric isn't specified. Valid choices are _all, store, indexing, get, search, merge, flush, refresh, query_cache, fielddata, docs, warmer, - completion, segments, translog, suggest, request_cache, recovery + completion, segments, translog, suggest, request_cache, recovery. :arg completion_fields: Comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards). :arg fielddata_fields: Comma-separated list of fields for @@ -122,9 +122,9 @@ def stats( index metric. 
:arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested). (default: false) + applies if segment stats are requested). Default is false. :arg level: Return indices stats aggregated at index, node or - shard level. Valid choices: indices, node, shards + shard level. Valid choices are indices, node, shards. :arg timeout: Operation timeout. :arg types: Comma-separated list of document types for the `indexing` index metric. @@ -148,16 +148,16 @@ def hot_threads(self, node_id=None, params=None, headers=None): the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes. - :arg doc_type: The type to sample. Valid choices: cpu, wait, - block + :arg doc_type: The type to sample. Valid choices are cpu, wait, + block. :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty - task queue. (default: True) + task queue. Default is True. :arg interval: The interval for the second sampling of threads. - :arg snapshots: Number of samples of thread stacktrace. - (default: 10) + :arg snapshots: Number of samples of thread stacktrace. Default + is 10. :arg threads: Specify the number of threads to provide - information for. (default: 3) + information for. Default is 3. :arg timeout: Operation timeout. """ # type is a reserved word so it cannot be used, use doc_type instead @@ -182,7 +182,7 @@ def usage(self, node_id=None, metric=None, params=None, headers=None): node you're connecting to, leave empty to get information from all nodes. :arg metric: Limit the information returned to the specified - metrics. Valid choices: _all, rest_actions + metrics. Valid choices are _all, rest_actions. :arg timeout: Operation timeout. """ return self.transport.perform_request( diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 32a362b3..1af50448 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -7,6 +7,17 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -17,25 +28,25 @@ class SecurityClient(NamespacedClient): def get_account_details(self, params=None, headers=None): """ Returns account details for the current user. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "account"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/account", params=params, headers=headers ) @query_params() def change_password(self, body, params=None, headers=None): """ Changes the password for the current user. 
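# Usage sketch for the node info/stats parameters documented above; the
# metric choices come straight from the docstrings.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

print(client.nodes.info(metric="process,jvm", flat_settings=True))
print(client.nodes.stats(metric="jvm,os", level="node"))
print(client.nodes.hot_threads(threads=3, ignore_idle_threads=True))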
+ + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "account"), + "/_plugins/_security/api/account", params=params, headers=headers, body=body, @@ -45,10 +56,13 @@ def change_password(self, body, params=None, headers=None): def get_action_group(self, action_group, params=None, headers=None): """ Retrieves one action group. + + + :arg action_group: Action group to retrieve. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." ) return self.transport.perform_request( @@ -62,10 +76,11 @@ def get_action_group(self, action_group, params=None, headers=None): def get_action_groups(self, params=None, headers=None): """ Retrieves all action groups. + """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups/", params=params, headers=headers, ) @@ -73,11 +88,14 @@ def get_action_groups(self, params=None, headers=None): @query_params() def delete_action_group(self, action_group, params=None, headers=None): """ - Deletes the specified action group. + Delete a specified action group. + + + :arg action_group: Action group to delete. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." ) return self.transport.perform_request( @@ -91,6 +109,10 @@ def delete_action_group(self, action_group, params=None, headers=None): def create_action_group(self, action_group, body, params=None, headers=None): """ Creates or replaces the specified action group. + + + :arg action_group: The name of the action group to create or + replace """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -108,6 +130,8 @@ def create_action_group(self, action_group, body, params=None, headers=None): def patch_action_group(self, action_group, body, params=None, headers=None): """ Updates individual attributes of an action group. + + """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -125,13 +149,15 @@ def patch_action_group(self, action_group, body, params=None, headers=None): def patch_action_groups(self, body, params=None, headers=None): """ Creates, updates, or deletes multiple action groups in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups", params=params, headers=headers, body=body, @@ -140,7 +166,9 @@ def patch_action_groups(self, body, params=None, headers=None): @query_params() def get_user(self, username, params=None, headers=None): """ - Retrieves one user. + Retrieve one internal user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -155,11 +183,12 @@ def get_user(self, username, params=None, headers=None): @query_params() def get_users(self, params=None, headers=None): """ - Retrieves all users. + Retrieve all internal users. 
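# Sketch of the account endpoints documented above; the credentials are
# placeholders and should never be hard-coded in real code.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

print(client.security.get_account_details())
client.security.change_password(
    body={"current_password": "admin", "password": "a-stronger-secret"}
)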
+ """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, ) @@ -167,7 +196,9 @@ def get_users(self, params=None, headers=None): @query_params() def delete_user(self, username, params=None, headers=None): """ - Deletes the specified user. + Delete the specified user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -183,6 +214,8 @@ def delete_user(self, username, params=None, headers=None): def create_user(self, username, body, params=None, headers=None): """ Creates or replaces the specified user. + + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -200,6 +233,8 @@ def create_user(self, username, body, params=None, headers=None): def patch_user(self, username, body, params=None, headers=None): """ Updates individual attributes of an internal user. + + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -217,13 +252,15 @@ def patch_user(self, username, body, params=None, headers=None): def patch_users(self, body, params=None, headers=None): """ Creates, updates, or deletes multiple internal users in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, body=body, @@ -233,6 +270,8 @@ def patch_users(self, body, params=None, headers=None): def get_role(self, role, params=None, headers=None): """ Retrieves one role. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -248,18 +287,18 @@ def get_role(self, role, params=None, headers=None): def get_roles(self, params=None, headers=None): """ Retrieves all roles. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "roles"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/roles/", params=params, headers=headers ) @query_params() def delete_role(self, role, params=None, headers=None): """ - Deletes the specified role. + Delete the specified role. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -275,6 +314,8 @@ def delete_role(self, role, params=None, headers=None): def create_role(self, role, body, params=None, headers=None): """ Creates or replaces the specified role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -292,6 +333,8 @@ def create_role(self, role, body, params=None, headers=None): def patch_role(self, role, body, params=None, headers=None): """ Updates individual attributes of a role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -309,13 +352,15 @@ def patch_role(self, role, body, params=None, headers=None): def patch_roles(self, body, params=None, headers=None): """ Creates, updates, or deletes multiple roles in a single call. 
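# Hedged sketch of internal user management per the methods above; the
# username, password, and backend role are invented for illustration.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

client.security.create_user(
    username="analyst",
    body={"password": "a-strong-password", "backend_roles": ["analytics"]},
)
print(client.security.get_users())
client.security.delete_user(username="analyst")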
+ + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "roles"), + "/_plugins/_security/api/roles", params=params, headers=headers, body=body, @@ -325,6 +370,8 @@ def patch_roles(self, body, params=None, headers=None): def get_role_mapping(self, role, params=None, headers=None): """ Retrieves one role mapping. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -340,10 +387,11 @@ def get_role_mapping(self, role, params=None, headers=None): def get_role_mappings(self, params=None, headers=None): """ Retrieves all role mappings. + """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, ) @@ -352,6 +400,8 @@ def get_role_mappings(self, params=None, headers=None): def delete_role_mapping(self, role, params=None, headers=None): """ Deletes the specified role mapping. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -367,6 +417,8 @@ def delete_role_mapping(self, role, params=None, headers=None): def create_role_mapping(self, role, body, params=None, headers=None): """ Creates or replaces the specified role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -384,6 +436,8 @@ def create_role_mapping(self, role, body, params=None, headers=None): def patch_role_mapping(self, role, body, params=None, headers=None): """ Updates individual attributes of a role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -401,13 +455,15 @@ def patch_role_mapping(self, role, body, params=None, headers=None): def patch_role_mappings(self, body, params=None, headers=None): """ Creates or updates multiple role mappings in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, body=body, @@ -417,6 +473,8 @@ def patch_role_mappings(self, body, params=None, headers=None): def get_tenant(self, tenant, params=None, headers=None): """ Retrieves one tenant. + + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -432,18 +490,18 @@ def get_tenant(self, tenant, params=None, headers=None): def get_tenants(self, params=None, headers=None): """ Retrieves all tenants. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "tenants"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/tenants/", params=params, headers=headers ) @query_params() def delete_tenant(self, tenant, params=None, headers=None): """ - Deletes the specified tenant. + Delete the specified tenant. + + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -459,6 +517,8 @@ def delete_tenant(self, tenant, params=None, headers=None): def create_tenant(self, tenant, body, params=None, headers=None): """ Creates or replaces the specified tenant. 
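# JSON Patch sketch for the bulk role helpers above; the role name and
# permissions are assumptions.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False)

client.security.patch_roles(body=[
    {
        "op": "add",
        "path": "/read_only_logs",
        "value": {
            "index_permissions": [
                {"index_patterns": ["logs-*"], "allowed_actions": ["read"]}
            ]
        },
    }
])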
+ + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -476,6 +536,8 @@ def create_tenant(self, tenant, body, params=None, headers=None): def patch_tenant(self, tenant, body, params=None, headers=None): """ Add, delete, or modify a single tenant. + + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -493,13 +555,15 @@ def patch_tenant(self, tenant, body, params=None, headers=None): def patch_tenants(self, body, params=None, headers=None): """ Add, delete, or modify multiple tenants in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "tenants"), + "/_plugins/_security/api/tenants/", params=params, headers=headers, body=body, @@ -508,11 +572,12 @@ def patch_tenants(self, body, params=None, headers=None): @query_params() def get_configuration(self, params=None, headers=None): """ - Retrieves the current Security plugin configuration in JSON format. + Returns the current Security plugin configuration in JSON format. + """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, ) @@ -520,14 +585,16 @@ def get_configuration(self, params=None, headers=None): @query_params() def update_configuration(self, body, params=None, headers=None): """ - Retrieves the current Security plugin configuration in JSON format. + Adds or updates the existing configuration using the REST API. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "securityconfig", "config"), + "/_plugins/_security/api/securityconfig/config", params=params, headers=headers, body=body, @@ -536,14 +603,16 @@ def update_configuration(self, body, params=None, headers=None): @query_params() def patch_configuration(self, body, params=None, headers=None): """ - Updates the existing configuration using the REST API. + A PATCH call is used to update the existing configuration using the REST API. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, body=body, @@ -553,6 +622,8 @@ def patch_configuration(self, body, params=None, headers=None): def get_distinguished_names(self, cluster_name=None, params=None, headers=None): """ Retrieves all distinguished names in the allow list. + + """ return self.transport.perform_request( "GET", @@ -562,13 +633,19 @@ def get_distinguished_names(self, cluster_name=None, params=None, headers=None): ) @query_params() - def update_distinguished_names(self, cluster_name, body, params=None, headers=None): + def update_distinguished_names( + self, cluster_name, body=None, params=None, headers=None + ): """ - Adds or updates the specified distinguished names in the cluster's or node's allow list. + Adds or updates the specified distinguished names in the cluster’s or node’s + allow list. 
+ + """ - for param in (cluster_name, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") + if cluster_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'cluster_name'." + ) return self.transport.perform_request( "PUT", @@ -581,11 +658,14 @@ def update_distinguished_names(self, cluster_name, body, params=None, headers=No @query_params() def delete_distinguished_names(self, cluster_name, params=None, headers=None): """ - Deletes all distinguished names in the specified cluster's or node's allow list. + Deletes all distinguished names in the specified cluster’s or node’s allow + list. + + """ if cluster_name in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'cluster-name'." + "Empty value passed for a required argument 'cluster_name'." ) return self.transport.perform_request( @@ -598,25 +678,22 @@ def delete_distinguished_names(self, cluster_name, params=None, headers=None): @query_params() def get_certificates(self, params=None, headers=None): """ - Retrieves the cluster's security certificates. + Retrieves the cluster’s security certificates. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "ssl", "certs"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/ssl/certs", params=params, headers=headers ) @query_params() def reload_transport_certificates(self, params=None, headers=None): """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload transport layer communication certificates. + """ return self.transport.perform_request( "PUT", - _make_path( - "_opendistro", "_security", "api", "ssl", "transport", "reloadcerts" - ), + "/_plugins/_security/api/ssl/transport/reloadcerts", params=params, headers=headers, ) @@ -624,11 +701,12 @@ def reload_transport_certificates(self, params=None, headers=None): @query_params() def reload_http_certificates(self, params=None, headers=None): """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload HTTP layer communication certificates. + """ return self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "ssl", "http", "reloadcerts"), + "/_plugins/_security/api/ssl/http/reloadcerts", params=params, headers=headers, ) @@ -637,12 +715,10 @@ def reload_http_certificates(self, params=None, headers=None): def flush_cache(self, params=None, headers=None): """ Flushes the Security plugin user, authentication, and authorization cache. + """ return self.transport.perform_request( - "DELETE", - _make_path("_plugins", "_security", "api", "cache"), - params=params, - headers=headers, + "DELETE", "/_plugins/_security/api/cache", params=params, headers=headers ) @query_params() @@ -658,13 +734,11 @@ def health(self, params=None, headers=None): @query_params() def get_audit_configuration(self, params=None, headers=None): """ - A GET call retrieves the audit configuration. + Retrieves the audit configuration. + """ return self.transport.perform_request( - "GET", - _make_path("_opendistro", "_security", "api", "audit"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/audit", params=params, headers=headers ) @query_params() @@ -672,6 +746,7 @@ def update_audit_configuration(self, body, params=None, headers=None): """ Updates the audit configuration. 
+ """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -688,13 +763,33 @@ def update_audit_configuration(self, body, params=None, headers=None): def patch_audit_configuration(self, body, params=None, headers=None): """ A PATCH call is used to update specified fields in the audit configuration. + + + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return self.transport.perform_request( + "PATCH", + "/_plugins/_security/api/audit", + params=params, + headers=headers, + body=body, + ) + + @query_params() + def patch_distinguished_names(self, body, params=None, headers=None): + """ + Bulk update of distinguished names. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_opendistro", "_security", "api", "audit"), + "/_plugins/_security/api/nodesdn", params=params, headers=headers, body=body, diff --git a/opensearchpy/client/security.pyi b/opensearchpy/client/security.pyi index c729d5d3..dbcc1179 100644 --- a/opensearchpy/client/security.pyi +++ b/opensearchpy/client/security.pyi @@ -6,194 +6,263 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union -from .utils import NamespacedClient as NamespacedClient +from .utils import NamespacedClient class SecurityClient(NamespacedClient): def get_account_details( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def change_password( self, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
def get_action_group( self, action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def get_action_groups( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def delete_action_group( self, action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def create_action_group( self, action_group: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def patch_action_group( self, action_group: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... 
+ pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def patch_action_groups( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def get_user( self, username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def get_users( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def delete_user( self, username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
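
The user stubs above map onto a straightforward create/read/delete flow. A hedged sketch, reusing the `client` instance from the previous example (the user name and body fields are illustrative):

```python
# Create or replace an internal user.
client.security.create_user(
    username="test-user",
    body={"password": "Str0ng-Passw0rd!", "backend_roles": ["admin"]},
)

# Read one user, then the full listing.
print(client.security.get_user(username="test-user"))
print(client.security.get_users())

# Remove the user again.
client.security.delete_user(username="test-user")
```
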
def create_user( self, username: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def patch_user( self, username: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... def patch_users( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_roles( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def delete_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def create_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_roles( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_role_mappings( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def delete_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def create_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_role_mappings( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_tenant( - self, - tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_tenants( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... 
- def delete_tenant( - self, - tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_tenant( - self, - tenant: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_tenant( - self, - tenant: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_tenants( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def update_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_distinguished_names( - self, - cluster_name: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_distinguished_names( self, - cluster_name: Any, + *, body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_distinguished_names( - self, - cluster_name: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def reload_transport_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def reload_http_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def flush_cache( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def health( + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_role( self, + role: Any, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -208,13 +277,9 @@ class SecurityClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def get_audit_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def update_audit_configuration( + def get_roles( self, *, - body: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -228,6 +293,528 @@ class SecurityClient(NamespacedClient): params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... - def patch_audit_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... 
+ def delete_role( + self, + role: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def create_role( + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def patch_role( + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def patch_roles( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_role_mapping( + self, + role: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
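
The role endpoints follow the same pattern, and the `patch_*` variants accept JSON Patch style operation lists. A sketch under the same assumptions (the role name and permissions are illustrative):

```python
# Create or replace a role.
client.security.create_role(
    role="test-role",
    body={
        "cluster_permissions": ["cluster_composite_ops"],
        "index_permissions": [
            {"index_patterns": ["movies*"], "allowed_actions": ["read"]}
        ],
    },
)
print(client.security.get_roles())

# The bulk patch endpoint operates on the whole collection; this removes the role.
client.security.patch_roles(body=[{"op": "remove", "path": "/test-role"}])
```
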
+ def get_role_mappings( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def delete_role_mapping( + self, + role: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def create_role_mapping( + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def patch_role_mapping( + self, + role: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def patch_role_mappings( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
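
Role mappings attach users and backend roles to a role. A short sketch with the same `client` (the mapped names are illustrative):

```python
# Map a backend role and a user onto an existing role.
client.security.create_role_mapping(
    role="all_access",
    body={"backend_roles": ["admin"], "users": ["test-user"]},
)
print(client.security.get_role_mappings())
```
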
+ def get_tenant( + self, + tenant: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_tenants( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def delete_tenant( + self, + tenant: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def create_tenant( + self, + tenant: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def patch_tenant( + self, + tenant: Any, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
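
The tenant stubs cover the same CRUD shape. For instance (the tenant name and description are illustrative):

```python
client.security.create_tenant(
    tenant="test-tenant", body={"description": "A demo tenant"}
)
print(client.security.get_tenants())
client.security.delete_tenant(tenant="test-tenant")
```
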
+ def patch_tenants( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_configuration( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def update_configuration( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def patch_configuration( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_distinguished_names( + self, + *, + cluster_name: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
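
Of the configuration endpoints, `get_configuration` is a safe read, while the update and patch variants overwrite the live security configuration and should be used with care on a real cluster. A hedged sketch (the patched path and value are illustrative):

```python
# Read the current Security plugin configuration.
print(client.security.get_configuration())

# Patch a single dynamic setting via a JSON Patch operation.
client.security.patch_configuration(
    body=[
        {
            "op": "replace",
            "path": "/config/dynamic/do_not_fail_on_forbidden",
            "value": True,
        }
    ]
)
```
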
+ def update_distinguished_names( + self, + cluster_name: Any, + *, + body: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def delete_distinguished_names( + self, + cluster_name: Any, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_certificates( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def reload_transport_certificates( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def reload_http_certificates( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
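
Distinguished-name management and certificate reloading both assume cluster-side opt-in (a `nodes_dn` allow list and the SSL hot-reload settings must be enabled); the DN and cluster name below are illustrative:

```python
# Allow-list a node's distinguished name under a named entry.
client.security.update_distinguished_names(
    cluster_name="my-cluster",
    body={"nodes_dn": ["CN=node1.example.com,OU=SSL,O=Test,L=Test,C=DE"]},
)

# Inspect the node's certificates, then hot-reload them without a restart.
print(client.security.get_certificates())
client.security.reload_transport_certificates()
client.security.reload_http_certificates()
```
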
+ def flush_cache( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def health( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def get_audit_configuration( + self, + *, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def update_audit_configuration( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... + def patch_audit_configuration( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... 
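
The audit endpoints read and patch the audit configuration, and `flush_cache`/`health` round out the namespace. A brief sketch (the patched path is illustrative):

```python
# Read the audit configuration, then toggle one flag via JSON Patch.
print(client.security.get_audit_configuration())
client.security.patch_audit_configuration(
    body=[{"op": "replace", "path": "/config/enabled", "value": True}]
)

# Drop the Security plugin caches and check plugin health.
client.security.flush_cache()
print(client.security.health())
```
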
+ def patch_distinguished_names( + self, + *, + body: Any, + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index 200fff96..ac0683a5 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -25,25 +25,36 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") def create(self, repository, snapshot, body=None, params=None, headers=None): """ Creates a snapshot in a repository. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. + :arg snapshot: Snapshot name. :arg body: The snapshot definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -57,18 +68,19 @@ def create(self, repository, snapshot, body=None, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") def delete(self, repository, snapshot, params=None, headers=None): """ Deletes a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -82,33 +94,25 @@ def delete(self, repository, snapshot, params=None, headers=None): ) @query_params( - "ignore_unavailable", - "include_repository", - "index_details", - "master_timeout", - "cluster_manager_timeout", - "verbose", + "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) def get(self, repository, snapshot, params=None, headers=None): """ Returns information about a snapshot. - :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg include_repository: Whether to include the repository name - in the snapshot info. Defaults to true. - :arg index_details: Whether to include details of each index in - the snapshot, if those details are available. Defaults to false. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg verbose: Whether to show verbose snapshot info or only show - the basic info found in the repository index blob + the basic info found in the repository index blob. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -121,7 +125,7 @@ def get(self, repository, snapshot, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def delete_repository(self, repository, params=None, headers=None): """ Deletes a repository. @@ -129,11 +133,12 @@ def delete_repository(self, repository, params=None, headers=None): :arg repository: Name of the snapshot repository to unregister. Wildcard (`*`) patterns are supported. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -145,38 +150,40 @@ def delete_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "local", "master_timeout") def get_repository(self, repository=None, params=None, headers=None): """ Returns information about a repository. 
- :arg repository: A comma-separated list of repository names + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", _make_path("_snapshot", repository), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout", "verify") + @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") def create_repository(self, repository, body, params=None, headers=None): """ Creates a repository. - :arg repository: A repository name + :arg repository: Repository name. :arg body: The repository definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout - :arg verify: Whether to verify the repository after creation + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. + :arg verify: Whether to verify the repository after creation. """ for param in (repository, body): if param in SKIP_IN_PATH: @@ -190,21 +197,22 @@ def create_repository(self, repository, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") def restore(self, repository, snapshot, body=None, params=None, headers=None): """ Restores a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. + :arg snapshot: Snapshot name. :arg body: Details of what to restore - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. 
""" for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -218,21 +226,22 @@ def restore(self, repository, snapshot, body=None, params=None, headers=None): body=body, ) - @query_params("ignore_unavailable", "master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") def status(self, repository=None, snapshot=None, params=None, headers=None): """ Returns information about the status of a snapshot. - :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", @@ -241,18 +250,19 @@ def status(self, repository=None, snapshot=None, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def verify_repository(self, repository, params=None, headers=None): """ Verifies a repository. - :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -264,18 +274,19 @@ def verify_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") + @query_params("cluster_manager_timeout", "master_timeout", "timeout") def cleanup_repository(self, repository, params=None, headers=None): """ Removes stale data from repository. - :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
""" if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -287,7 +298,7 @@ def cleanup_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") def clone( self, repository, snapshot, target_snapshot, body, params=None, headers=None ): @@ -295,14 +306,15 @@ def clone( Clones indices from one snapshot into another snapshot in the same repository. - :arg repository: A repository name - :arg snapshot: The name of the snapshot to clone from - :arg target_snapshot: The name of the cloned snapshot to create + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg target_snapshot: The name of the cloned snapshot to create. :arg body: The snapshot clone definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (repository, snapshot, target_snapshot, body): if param in SKIP_IN_PATH: @@ -315,56 +327,3 @@ def clone( headers=headers, body=body, ) - - @query_params( - "blob_count", - "concurrency", - "detailed", - "early_read_node_count", - "max_blob_size", - "max_total_data_size", - "rare_action_probability", - "rarely_abort_writes", - "read_node_count", - "seed", - "timeout", - ) - def repository_analyze(self, repository, params=None, headers=None): - """ - Analyzes a repository for correctness and performance - - - :arg repository: A repository name - :arg blob_count: Number of blobs to create during the test. - Defaults to 100. - :arg concurrency: Number of operations to run concurrently - during the test. Defaults to 10. - :arg detailed: Whether to return detailed results or a summary. - Defaults to 'false' so that only the summary is returned. - :arg early_read_node_count: Number of nodes on which to perform - an early read on a blob, i.e. before writing has completed. Early reads - are rare actions so the 'rare_action_probability' parameter is also - relevant. Defaults to 2. - :arg max_blob_size: Maximum size of a blob to create during the - test, e.g '1gb' or '100mb'. Defaults to '10mb'. - :arg max_total_data_size: Maximum total size of all blobs to - create during the test, e.g '1tb' or '100gb'. Defaults to '1gb'. - :arg rare_action_probability: Probability of taking a rare - action such as an early read or an overwrite. Defaults to 0.02. - :arg rarely_abort_writes: Whether to rarely abort writes before - they complete. Defaults to 'true'. - :arg read_node_count: Number of nodes on which to read a blob - after writing. Defaults to 10. - :arg seed: Seed for the random number generator used to create - the test workload. Defaults to a random value. - :arg timeout: Explicit operation timeout. Defaults to '30s'. 
- """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'repository'.") - - return self.transport.perform_request( - "POST", - _make_path("_snapshot", repository, "_analyze"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/client/snapshot.pyi b/opensearchpy/client/snapshot.pyi index b1db95c0..8d50f086 100644 --- a/opensearchpy/client/snapshot.pyi +++ b/opensearchpy/client/snapshot.pyi @@ -24,6 +24,15 @@ # specific language governing permissions and limitations # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient @@ -35,8 +44,8 @@ class SnapshotClient(NamespacedClient): snapshot: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -56,8 +65,8 @@ class SnapshotClient(NamespacedClient): repository: Any, snapshot: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -76,11 +85,9 @@ class SnapshotClient(NamespacedClient): repository: Any, snapshot: Any, *, + cluster_manager_timeout: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., - include_repository: Optional[Any] = ..., - index_details: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., verbose: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -99,8 +106,8 @@ class SnapshotClient(NamespacedClient): self, repository: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -119,9 +126,9 @@ class SnapshotClient(NamespacedClient): self, *, repository: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., local: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -140,8 +147,8 @@ class SnapshotClient(NamespacedClient): repository: Any, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., verify: Optional[Any] = ..., pretty: Optional[bool] = ..., @@ -163,8 +170,8 @@ class SnapshotClient(NamespacedClient): snapshot: Any, *, body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = 
..., @@ -184,9 +191,9 @@ class SnapshotClient(NamespacedClient): *, repository: Optional[Any] = ..., snapshot: Optional[Any] = ..., + cluster_manager_timeout: Optional[Any] = ..., ignore_unavailable: Optional[Any] = ..., master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., @@ -204,8 +211,8 @@ class SnapshotClient(NamespacedClient): self, repository: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -224,8 +231,8 @@ class SnapshotClient(NamespacedClient): self, repository: Any, *, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., @@ -247,36 +254,8 @@ class SnapshotClient(NamespacedClient): target_snapshot: Any, *, body: Any, - master_timeout: Optional[Any] = ..., cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def repository_analyze( - self, - repository: Any, - *, - blob_count: Optional[Any] = ..., - concurrency: Optional[Any] = ..., - detailed: Optional[Any] = ..., - early_read_node_count: Optional[Any] = ..., - max_blob_size: Optional[Any] = ..., - max_total_data_size: Optional[Any] = ..., - rare_action_probability: Optional[Any] = ..., - rarely_abort_writes: Optional[Any] = ..., - read_node_count: Optional[Any] = ..., - seed: Optional[Any] = ..., - timeout: Optional[Any] = ..., + master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index ff76a3a6..0cc7a4b2 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -57,9 +57,10 @@ def list(self, params=None, headers=None): :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information. + :arg detailed: Return detailed task information. Default is + false. :arg group_by: Group tasks by nodes or parent/child - relationships. Valid choices: nodes, parents, none + relationships. Valid choices are nodes, parents, none. :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all @@ -68,7 +69,7 @@ def list(self, params=None, headers=None): (node_id:task_number). Set to -1 to return all. :arg timeout: Operation timeout. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. + operation has completed before returning. Default is false. 
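A minimal sketch of the tasks APIs whose docstrings are reworded here (`list` above, `cancel` and `get` just below); the task ID is illustrative.

```python
# Group tasks by parent/child relationships (valid choices: nodes, parents, none).
tasks = client.tasks.list(detailed=True, group_by="parents")

# `wait_for_completion` defaults to false, so both calls return immediately.
client.tasks.cancel(task_id="node123:456")
info = client.tasks.get(task_id="node123:456")
```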
""" return self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers @@ -91,7 +92,7 @@ def cancel(self, task_id=None, params=None, headers=None): :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. + operation has completed before returning. Default is false. """ return self.transport.perform_request( "POST", @@ -110,7 +111,7 @@ def get(self, task_id=None, params=None, headers=None): (node_id:task_number). :arg timeout: Operation timeout. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. + operation has completed before returning. Default is false. """ if task_id in SKIP_IN_PATH: warnings.warn( diff --git a/utils/generate-api.py b/utils/generate-api.py index fffd0e82..2cc95013 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -301,9 +301,10 @@ def all_parts(self): parts.update(url.get("parts", {})) for p in parts: - parts[p]["required"] = all( - p in url.get("parts", {}) for url in self._def["url"]["paths"] - ) + if "required" not in parts[p]: + parts[p]["required"] = all( + p in url.get("parts", {}) for url in self._def["url"]["paths"] + ) parts[p]["type"] = "Any" # This piece of logic corresponds to calling @@ -555,6 +556,8 @@ def read_modules(): # Group the data in the current group by the "path" key paths = [] + all_paths_have_deprecation = True + for key2, value2 in groupby(value, key=itemgetter("path")): # Extract the HTTP methods from the data in the current subgroup methods = [] @@ -567,8 +570,10 @@ def read_modules(): documentation = {"description": z["description"]} api.update({"documentation": documentation}) - if "deprecation_message" not in api and "x-deprecation-message" in z: - api.update({"deprecation_message": z["x-deprecation-message"]}) + if "x-deprecation-message" in z: + x_deprecation_message = z["x-deprecation-message"] + else: + all_paths_have_deprecation = False if "params" not in api and "params" in z: api.update({"params": z["params"]}) @@ -637,6 +642,11 @@ def read_modules(): paths.append({"path": key2, "methods": methods}) api.update({"url": {"paths": paths}}) + if all_paths_have_deprecation and x_deprecation_message is not None: + api.update({"deprecation_message": x_deprecation_message}) + + if namespace == "indices" and name == "put_mapping": + api["url"]["paths"][0]["parts"]["index"].update({"required": False}) if namespace not in modules: modules[namespace] = Module(namespace) diff --git a/utils/templates/base b/utils/templates/base index 47bb5956..bf270aee 100644 --- a/utils/templates/base +++ b/utils/templates/base @@ -20,11 +20,13 @@ {% if api.params|list|length %} {% for p, info in api.params %} + {% if info.description %} {% filter wordwrap(72, wrapstring="\n ") %} - :arg {{ p }}{% if info.deprecated %} (Deprecated: {{ info['deprecation_message'][:-1] }}){% endif %}: {{ info.description }}{% if info.options %} Valid choices: {{ info.options|join(", ") }}{% endif %} - {% if info.default is defined %}{% if info.default is not none %}{% if info.default is sameas(false) %} (default: false){% else %} (default: {{ info.default }}){% endif %}{% endif %}{% endif %} + :arg {{ p }}{% if info.deprecated %} (Deprecated: {{ info['deprecation_message'][:-1] }}.){% endif %}: {{ info.description }} {% if info.options %}Valid choices are {{ info.options|join(", ") }}.{% endif %} + {% if info.default is defined %}{% 
if info.default is not none %}{% if info.default is sameas(false) %}Default is false.{% else %}Default is {{ info.default }}.{% endif %}{% endif %}{% endif %} {% endfilter %} + {% endif %} {% endfor %} {% endif %} """ From d6e0b5f54ab04b99ead162cf51e846f8ea501223 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Thu, 26 Oct 2023 07:59:07 -0700 Subject: [PATCH 34/80] added remote store client (#552) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/client/__init__.py | 2 + opensearchpy/_async/client/__init__.pyi | 2 + opensearchpy/_async/client/remote_store.py | 40 ++++++++++++++++++ opensearchpy/_async/client/remote_store.pyi | 41 +++++++++++++++++++ opensearchpy/client/__init__.py | 2 + opensearchpy/client/__init__.pyi | 2 + opensearchpy/client/remote_store.py | 40 ++++++++++++++++++ opensearchpy/client/remote_store.pyi | 41 +++++++++++++++++++ .../test_client/test_remote_store.py | 15 +++++++ 10 files changed, 186 insertions(+) create mode 100644 opensearchpy/_async/client/remote_store.py create mode 100644 opensearchpy/_async/client/remote_store.pyi create mode 100644 opensearchpy/client/remote_store.py create mode 100644 opensearchpy/client/remote_store.pyi create mode 100644 test_opensearchpy/test_client/test_remote_store.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 9c55a5d1..6803cf4c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added benchmarks ([#537](https://github.com/opensearch-project/opensearch-py/pull/537)) - Added guide on making raw JSON REST requests ([#542](https://github.com/opensearch-project/opensearch-py/pull/542)) - Added support for AWS SigV4 for urllib3 ([#547](https://github.com/opensearch-project/opensearch-py/pull/547)) +- Added `remote store` client APIs ([#552](https://github.com/opensearch-project/opensearch-py/pull/552)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 7f0d4f98..a7587f82 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -50,6 +50,7 @@ from .nodes import NodesClient from .plugins import PluginsClient from .remote import RemoteClient +from .remote_store import RemoteStoreClient from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient @@ -217,6 +218,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.security = SecurityClient(self) self.snapshot = SnapshotClient(self) self.tasks = TasksClient(self) + self.remote_store = RemoteStoreClient(self) self.features = FeaturesClient(self) diff --git a/opensearchpy/_async/client/__init__.pyi b/opensearchpy/_async/client/__init__.pyi index 70a93d19..32ea967e 100644 --- a/opensearchpy/_async/client/__init__.pyi +++ b/opensearchpy/_async/client/__init__.pyi @@ -48,6 +48,7 @@ from .indices import IndicesClient from .ingest import IngestClient from .nodes import NodesClient from .remote import RemoteClient +from .remote_store import RemoteStoreClient from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient @@ -67,6 +68,7 @@ class AsyncOpenSearch(object): 
security: SecurityClient snapshot: SnapshotClient tasks: TasksClient + remote_store: RemoteStoreClient def __init__( self, hosts: Any = ..., diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py new file mode 100644 index 00000000..731d1233 --- /dev/null +++ b/opensearchpy/_async/client/remote_store.py @@ -0,0 +1,40 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from .utils import SKIP_IN_PATH, NamespacedClient, query_params + + +class RemoteStoreClient(NamespacedClient): + @query_params("cluster_manager_timeout", "wait_for_completion") + async def restore(self, body, params=None, headers=None): + """ + Restores from remote store. + + + :arg body: Comma-separated list of index IDs + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. (default: false) + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "POST", "/_remotestore/_restore", params=params, headers=headers, body=body + ) diff --git a/opensearchpy/_async/client/remote_store.pyi b/opensearchpy/_async/client/remote_store.pyi new file mode 100644 index 00000000..8ea2d077 --- /dev/null +++ b/opensearchpy/_async/client/remote_store.pyi @@ -0,0 +1,41 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
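A minimal usage sketch for the new `remote_store.restore` API above, assuming an `AsyncOpenSearch` client and indices backed by a remote store; the body mirrors the docstring's list-of-index-IDs form, as exercised by the test added later in this patch.

```python
# Restore index data from the remote store. `wait_for_completion` defaults
# to false, so pass True to block until the restore finishes.
response = await client.remote_store.restore(
    body=["index-1"],
    wait_for_completion=True,
)
```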
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + +from typing import Any, Collection, MutableMapping, Optional, Tuple, Union + +from .utils import NamespacedClient + +class RemoteStoreClient(NamespacedClient): + async def restore( + self, + *, + body: Any, + cluster_manager_timeout: Optional[Any] = ..., + wait_for_completion: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 25a779fd..6a8dffb6 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -50,6 +50,7 @@ from .nodes import NodesClient from .plugins import PluginsClient from .remote import RemoteClient +from .remote_store import RemoteStoreClient from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient @@ -217,6 +218,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.security = SecurityClient(self) self.snapshot = SnapshotClient(self) self.tasks = TasksClient(self) + self.remote_store = RemoteStoreClient(self) self.features = FeaturesClient(self) diff --git a/opensearchpy/client/__init__.pyi b/opensearchpy/client/__init__.pyi index 5bf53ec7..9ad72a83 100644 --- a/opensearchpy/client/__init__.pyi +++ b/opensearchpy/client/__init__.pyi @@ -48,6 +48,7 @@ from .indices import IndicesClient from .ingest import IngestClient from .nodes import NodesClient from .remote import RemoteClient +from .remote_store import RemoteStoreClient from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient @@ -67,6 +68,7 @@ class OpenSearch(object): security: SecurityClient snapshot: SnapshotClient tasks: TasksClient + remote_store: RemoteStoreClient def __init__( self, hosts: Any = ..., diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py new file mode 100644 index 00000000..a8fcfe06 --- /dev/null +++ b/opensearchpy/client/remote_store.py @@ -0,0 +1,40 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from .utils import SKIP_IN_PATH, NamespacedClient, query_params + + +class RemoteStoreClient(NamespacedClient): + @query_params("cluster_manager_timeout", "wait_for_completion") + def restore(self, body, params=None, headers=None): + """ + Restores from remote store. + + + :arg body: Comma-separated list of index IDs + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. (default: false) + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return self.transport.perform_request( + "POST", "/_remotestore/_restore", params=params, headers=headers, body=body + ) diff --git a/opensearchpy/client/remote_store.pyi b/opensearchpy/client/remote_store.pyi new file mode 100644 index 00000000..99928f3b --- /dev/null +++ b/opensearchpy/client/remote_store.pyi @@ -0,0 +1,41 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + +from typing import Any, Collection, MutableMapping, Optional, Tuple, Union + +from .utils import NamespacedClient + +class RemoteStoreClient(NamespacedClient): + def restore( + self, + *, + body: Any, + cluster_manager_timeout: Optional[Any] = ..., + wait_for_completion: Optional[Any] = ..., + pretty: Optional[bool] = ..., + human: Optional[bool] = ..., + error_trace: Optional[bool] = ..., + format: Optional[str] = ..., + filter_path: Optional[Union[str, Collection[str]]] = ..., + request_timeout: Optional[Union[int, float]] = ..., + ignore: Optional[Union[int, Collection[int]]] = ..., + opaque_id: Optional[str] = ..., + http_auth: Optional[Union[str, Tuple[str, str]]] = ..., + api_key: Optional[Union[str, Tuple[str, str]]] = ..., + params: Optional[MutableMapping[str, Any]] = ..., + headers: Optional[MutableMapping[str, str]] = ..., + ) -> Any: ... diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py new file mode 100644 index 00000000..eb6187b4 --- /dev/null +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -0,0 +1,15 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
+# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +from test_opensearchpy.test_cases import OpenSearchTestCase + + +class TestRemoteStore(OpenSearchTestCase): + def test_remote_store_restore(self): + self.client.remote_store.restore(body=["index-1"]) + self.assert_url_called("POST", "/_remotestore/_restore") From c14f8ac6d7d42f7691fbfa666a7237dd56104878 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 26 Oct 2023 11:31:13 -0400 Subject: [PATCH 35/80] Override put_mapping definition to make index name optional. (#553) Signed-off-by: dblock Signed-off-by: roma2023 --- dev-requirements.txt | 1 + opensearchpy/_async/client/remote_store.py | 2 +- opensearchpy/client/remote_store.py | 2 +- utils/generate-api.py | 16 +++++++++++++-- .../overrides/indices/put_mapping.json | 20 +++++++++++++++++++ 5 files changed, 37 insertions(+), 4 deletions(-) create mode 100644 utils/templates/overrides/indices/put_mapping.json diff --git a/dev-requirements.txt b/dev-requirements.txt index 04cfb3e8..a79a1a0b 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -7,6 +7,7 @@ sphinx<7.3 sphinx_rtd_theme jinja2 pytz +deepmerge # No wheels for Python 3.10 yet! numpy; python_version<"3.10" diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index 731d1233..457369ac 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -30,7 +30,7 @@ async def restore(self, body, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. (default: false) + operation has completed before returning. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py index a8fcfe06..38d8a1c6 100644 --- a/opensearchpy/client/remote_store.py +++ b/opensearchpy/client/remote_store.py @@ -30,7 +30,7 @@ def restore(self, body, params=None, headers=None): :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning. (default: false) + operation has completed before returning. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") diff --git a/utils/generate-api.py b/utils/generate-api.py index 2cc95013..a4032765 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -25,6 +25,7 @@ # specific language governing permissions and limitations # under the License. 
+import json import os import re from functools import lru_cache @@ -33,6 +34,7 @@ from pathlib import Path import black +import deepmerge import requests import unasync import urllib3 @@ -645,8 +647,7 @@ def read_modules(): if all_paths_have_deprecation and x_deprecation_message is not None: api.update({"deprecation_message": x_deprecation_message}) - if namespace == "indices" and name == "put_mapping": - api["url"]["paths"][0]["parts"]["index"].update({"required": False}) + api = apply_patch(namespace, name, api) if namespace not in modules: modules[namespace] = Module(namespace) @@ -657,6 +658,17 @@ def read_modules(): return modules +def apply_patch(namespace, name, api): + override_file_path = ( + CODE_ROOT / "utils/templates/overrides" / namespace / f"{name}.json" + ) + if os.path.exists(override_file_path): + with open(override_file_path) as f: + override_json = json.load(f) + api = deepmerge.always_merger.merge(api, override_json) + return api + + def dump_modules(modules): for mod in modules.values(): mod.dump() diff --git a/utils/templates/overrides/indices/put_mapping.json b/utils/templates/overrides/indices/put_mapping.json new file mode 100644 index 00000000..4409c446 --- /dev/null +++ b/utils/templates/overrides/indices/put_mapping.json @@ -0,0 +1,20 @@ +{ + "url": { + "paths": [ + { + "path": "/{index}/_mapping", + "methods": [ + "POST", + "PUT" + ], + "parts": { + "index": { + "type": "string", + "description": "Comma-separated list of indices; use `_all` or empty string to perform the operation on all indices.", + "required": false + } + } + } + ] + } +} \ No newline at end of file From 5cea1b57b3a86abce9829ba74fca42d30091128f Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 26 Oct 2023 11:55:25 -0400 Subject: [PATCH 36/80] Add a nox session for client generator. 
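To make the new override mechanism concrete: `apply_patch` deep-merges a hand-written JSON file under `utils/templates/overrides/<namespace>/<name>.json` into the generated API definition. A self-contained sketch of the merge semantics, with purely illustrative inputs:

```python
from deepmerge import always_merger

# Nested dicts merge recursively; on conflicting leaf values, the override
# (the second argument) wins. Inputs below are hypothetical, not real specs.
generated = {"params": {"timeout": {"type": "Any"}}}
override = {"params": {"timeout": {"required": False}}}

merged = always_merger.merge(generated, override)
assert merged == {"params": {"timeout": {"type": "Any", "required": False}}}
```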
(#554) Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + DEVELOPER_GUIDE.md | 14 ++++++-------- noxfile.py | 7 +++++++ 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6803cf4c..2a9955a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added guide on making raw JSON REST requests ([#542](https://github.com/opensearch-project/opensearch-py/pull/542)) - Added support for AWS SigV4 for urllib3 ([#547](https://github.com/opensearch-project/opensearch-py/pull/547)) - Added `remote store` client APIs ([#552](https://github.com/opensearch-project/opensearch-py/pull/552)) +- Added `nox -rs generate` ([#554](https://github.com/opensearch-project/opensearch-py/pull/554)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 5fe9cad1..dd1fad26 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -1,10 +1,10 @@ - [Developer Guide](#developer-guide) - [Prerequisites](#prerequisites) - - [Docker Image Installation](#docker-setup) + - [Install Docker Image](#install-docker-image) - [Running Tests](#running-tests) - - [Integration Tests](#integration-tests) + - [Linter](#linter) - [Documentation](#documentation) - - [Running Python Client Generator](#running-python-client-generator) + - [Client Code Generator](#client-code-generator) # Developer Guide @@ -115,12 +115,10 @@ make html Open `opensearch-py/docs/build/html/index.html` to see results. -## Running Python Client Generator +## Client Code Generator -The following code executes a python client generator that updates the client by utilizing the [openapi specifications](https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json) found in the "opensearch-api-specification" repository. This process allows for the automatic generation and synchronization of the client code with the latest API specifications. +OpenSearch publishes an [OpenAPI specification](https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json) in the [opensearch-api-specification](https://github.com/opensearch-project/opensearch-api-specification) repository, which is used to auto-generate the less interesting parts of the client. ``` -cd opensearch-py -python utils/generate-api.py -nox -rs format +nox -rs generate ``` diff --git a/noxfile.py b/noxfile.py index 6b734b48..ba2fe7b9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -87,3 +87,10 @@ def docs(session): "-rdev-requirements.txt", "sphinx-rtd-theme", "sphinx-autodoc-typehints" ) session.run("python", "-m", "pip", "install", "sphinx-autodoc-typehints") + + +@nox.session() +def generate(session): + session.install("-rdev-requirements.txt") + session.run("python", "utils/generate-api.py") + format(session) From ed32d8ea3e47a15d943cc3b779780e122365cc8a Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 26 Oct 2023 12:30:23 -0400 Subject: [PATCH 37/80] Fix: don't hardcode client class name. 
(#555) Signed-off-by: dblock Signed-off-by: roma2023 --- opensearchpy/_async/client/plugins.py | 2 +- opensearchpy/client/plugins.py | 5 +--- .../test_async/test_plugins_client.py | 24 +++++++++++++++++++ .../test_plugins/test_plugins_client.py | 24 +++++++++++++++++++ 4 files changed, 50 insertions(+), 5 deletions(-) create mode 100644 test_opensearchpy/test_async/test_plugins_client.py create mode 100644 test_opensearchpy/test_client/test_plugins/test_plugins_client.py diff --git a/opensearchpy/_async/client/plugins.py b/opensearchpy/_async/client/plugins.py index 2b762ba3..b39576c1 100644 --- a/opensearchpy/_async/client/plugins.py +++ b/opensearchpy/_async/client/plugins.py @@ -44,7 +44,7 @@ def _dynamic_lookup(self, client): setattr(client, plugin, getattr(self, plugin)) else: warnings.warn( - f"Cannot load `{plugin}` directly to AsyncOpenSearch. `{plugin}` already exists in AsyncOpenSearch. Please use `AsyncOpenSearch.plugin.{plugin}` instead.", + f"Cannot load `{plugin}` directly to {self.client.__class__.__name__} as it already exists. Use `{self.client.__class__.__name__}.plugin.{plugin}` instead.", category=RuntimeWarning, stacklevel=2, ) diff --git a/opensearchpy/client/plugins.py b/opensearchpy/client/plugins.py index 7fba8c32..b39576c1 100644 --- a/opensearchpy/client/plugins.py +++ b/opensearchpy/client/plugins.py @@ -7,7 +7,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. - import warnings from ..plugins.alerting import AlertingClient @@ -45,9 +44,7 @@ def _dynamic_lookup(self, client): setattr(client, plugin, getattr(self, plugin)) else: warnings.warn( - "Cannot load `{plugin}` directly to OpenSearch. `{plugin}` already exists in OpenSearch. Please use `OpenSearch.plugin.{plugin}` instead.".format( - plugin=plugin - ), + f"Cannot load `{plugin}` directly to {self.client.__class__.__name__} as it already exists. Use `{self.client.__class__.__name__}.plugin.{plugin}` instead.", category=RuntimeWarning, stacklevel=2, ) diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py new file mode 100644 index 00000000..c620873c --- /dev/null +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from unittest import TestCase + +from opensearchpy._async.client import AsyncOpenSearch + + +class TestPluginsClient(TestCase): + async def test_plugins_client(self): + with self.assertWarns(Warning) as w: + client = AsyncOpenSearch() + client.plugins.__init__(client) # double-init + self.assertEqual( + str(w.warnings[0].message), + "Cannot load `alerting` directly to AsyncOpenSearch as it already exists. 
Use `AsyncOpenSearch.plugin.alerting` instead.", + ) diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py new file mode 100644 index 00000000..e717d9cb --- /dev/null +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from opensearchpy.client import OpenSearch + +from ...test_cases import TestCase + + +class TestPluginsClient(TestCase): + def test_plugins_client(self): + with self.assertWarns(Warning) as w: + client = OpenSearch() + client.plugins.__init__(client) # double-init + self.assertEqual( + str(w.warnings[0].message), + "Cannot load `alerting` directly to OpenSearch as it already exists. Use `OpenSearch.plugin.alerting` instead.", + ) From 4f51d9c2fb67d1150b3611278bd0ab7f68d2eec7 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 26 Oct 2023 13:34:34 -0400 Subject: [PATCH 38/80] Added a utf-8 header to all .py files (#557) Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + benchmarks/bench_async.py | 1 + benchmarks/bench_info_sync.py | 1 + benchmarks/bench_sync.py | 1 + benchmarks/bench_sync_async.py | 1 + benchmarks/thread_with_return_value.py | 1 + noxfile.py | 1 + opensearchpy/__init__.py | 1 + opensearchpy/__init__.pyi | 1 + opensearchpy/_async/__init__.py | 1 + opensearchpy/_async/_extra_imports.py | 1 + opensearchpy/_async/client/_patch.py | 1 + opensearchpy/_async/client/_patch.pyi | 1 + opensearchpy/_async/client/cat.py | 1 + opensearchpy/_async/client/cat.pyi | 1 + opensearchpy/_async/client/cluster.py | 1 + opensearchpy/_async/client/cluster.pyi | 1 + opensearchpy/_async/client/dangling_indices.py | 1 + opensearchpy/_async/client/dangling_indices.pyi | 1 + opensearchpy/_async/client/features.py | 1 + opensearchpy/_async/client/features.pyi | 1 + opensearchpy/_async/client/indices.py | 1 + opensearchpy/_async/client/indices.pyi | 1 + opensearchpy/_async/client/ingest.py | 1 + opensearchpy/_async/client/ingest.pyi | 1 + opensearchpy/_async/client/nodes.py | 1 + opensearchpy/_async/client/nodes.pyi | 1 + opensearchpy/_async/client/plugins.py | 1 + opensearchpy/_async/client/plugins.pyi | 1 + opensearchpy/_async/client/remote.py | 1 + opensearchpy/_async/client/remote.pyi | 1 + opensearchpy/_async/client/remote_store.py | 1 + opensearchpy/_async/client/remote_store.pyi | 1 + opensearchpy/_async/client/security.py | 1 + opensearchpy/_async/client/security.pyi | 1 + opensearchpy/_async/client/snapshot.py | 1 + opensearchpy/_async/client/snapshot.pyi | 1 + opensearchpy/_async/client/tasks.py | 1 + opensearchpy/_async/client/tasks.pyi | 1 + opensearchpy/_async/client/utils.py | 1 + opensearchpy/_async/client/utils.pyi | 1 + opensearchpy/_async/compat.py | 1 + opensearchpy/_async/compat.pyi | 1 + opensearchpy/_async/helpers/__init__.py | 1 + opensearchpy/_async/helpers/actions.py | 1 + opensearchpy/_async/helpers/actions.pyi | 1 + opensearchpy/_async/helpers/document.py | 1 + opensearchpy/_async/helpers/document.pyi | 1 + opensearchpy/_async/helpers/faceted_search.py | 1 + opensearchpy/_async/helpers/faceted_search.pyi | 1 + opensearchpy/_async/helpers/index.py | 1 + opensearchpy/_async/helpers/index.pyi | 
1 + opensearchpy/_async/helpers/mapping.py | 1 + opensearchpy/_async/helpers/mapping.pyi | 1 + opensearchpy/_async/helpers/search.py | 1 + opensearchpy/_async/helpers/search.pyi | 1 + opensearchpy/_async/helpers/test.py | 1 + opensearchpy/_async/helpers/test.pyi | 1 + opensearchpy/_async/helpers/update_by_query.py | 1 + opensearchpy/_async/helpers/update_by_query.pyi | 1 + opensearchpy/_async/http_aiohttp.py | 1 + opensearchpy/_async/http_aiohttp.pyi | 1 + opensearchpy/_async/plugins/__init__.py | 1 + opensearchpy/_async/plugins/__init__.pyi | 1 + opensearchpy/_async/plugins/alerting.py | 1 + opensearchpy/_async/plugins/alerting.pyi | 1 + opensearchpy/_async/plugins/index_management.py | 1 + opensearchpy/_async/plugins/index_management.pyi | 1 + opensearchpy/_async/transport.py | 1 + opensearchpy/_async/transport.pyi | 1 + opensearchpy/_version.py | 1 + opensearchpy/client/_patch.py | 1 + opensearchpy/client/_patch.pyi | 1 + opensearchpy/client/cat.py | 1 + opensearchpy/client/cat.pyi | 1 + opensearchpy/client/cluster.py | 1 + opensearchpy/client/cluster.pyi | 1 + opensearchpy/client/dangling_indices.py | 1 + opensearchpy/client/dangling_indices.pyi | 1 + opensearchpy/client/features.py | 1 + opensearchpy/client/features.pyi | 1 + opensearchpy/client/indices.py | 1 + opensearchpy/client/indices.pyi | 1 + opensearchpy/client/ingest.py | 1 + opensearchpy/client/ingest.pyi | 1 + opensearchpy/client/nodes.py | 1 + opensearchpy/client/nodes.pyi | 1 + opensearchpy/client/plugins.py | 1 + opensearchpy/client/plugins.pyi | 1 + opensearchpy/client/remote.py | 1 + opensearchpy/client/remote.pyi | 1 + opensearchpy/client/remote_store.py | 1 + opensearchpy/client/remote_store.pyi | 1 + opensearchpy/client/security.py | 1 + opensearchpy/client/security.pyi | 1 + opensearchpy/client/snapshot.py | 1 + opensearchpy/client/snapshot.pyi | 1 + opensearchpy/client/tasks.py | 1 + opensearchpy/client/tasks.pyi | 1 + opensearchpy/client/utils.py | 1 + opensearchpy/client/utils.pyi | 1 + opensearchpy/compat.py | 1 + opensearchpy/compat.pyi | 1 + opensearchpy/connection/__init__.py | 1 + opensearchpy/connection/__init__.pyi | 1 + opensearchpy/connection/async_connections.py | 1 + opensearchpy/connection/async_connections.pyi | 1 + opensearchpy/connection/base.py | 1 + opensearchpy/connection/base.pyi | 1 + opensearchpy/connection/connections.py | 1 + opensearchpy/connection/connections.pyi | 1 + opensearchpy/connection/http_async.py | 1 + opensearchpy/connection/http_async.pyi | 1 + opensearchpy/connection/http_requests.py | 1 + opensearchpy/connection/http_requests.pyi | 1 + opensearchpy/connection/http_urllib3.py | 1 + opensearchpy/connection/http_urllib3.pyi | 1 + opensearchpy/connection/pooling.py | 1 + opensearchpy/connection/pooling.pyi | 1 + opensearchpy/connection_pool.py | 1 + opensearchpy/connection_pool.pyi | 1 + opensearchpy/exceptions.py | 1 + opensearchpy/exceptions.pyi | 1 + opensearchpy/helpers/__init__.py | 1 + opensearchpy/helpers/__init__.pyi | 1 + opensearchpy/helpers/actions.py | 1 + opensearchpy/helpers/actions.pyi | 1 + opensearchpy/helpers/aggs.py | 1 + opensearchpy/helpers/aggs.pyi | 1 + opensearchpy/helpers/analysis.py | 1 + opensearchpy/helpers/analysis.pyi | 1 + opensearchpy/helpers/asyncsigner.py | 1 + opensearchpy/helpers/asyncsigner.pyi | 1 + opensearchpy/helpers/document.py | 1 + opensearchpy/helpers/document.pyi | 1 + opensearchpy/helpers/errors.py | 1 + opensearchpy/helpers/errors.pyi | 1 + opensearchpy/helpers/faceted_search.py | 1 + opensearchpy/helpers/faceted_search.pyi | 1 + 
opensearchpy/helpers/field.py | 1 + opensearchpy/helpers/field.pyi | 1 + opensearchpy/helpers/function.py | 1 + opensearchpy/helpers/function.pyi | 1 + opensearchpy/helpers/index.py | 1 + opensearchpy/helpers/index.pyi | 1 + opensearchpy/helpers/mapping.py | 1 + opensearchpy/helpers/mapping.pyi | 1 + opensearchpy/helpers/query.py | 1 + opensearchpy/helpers/query.pyi | 1 + opensearchpy/helpers/response/__init__.py | 1 + opensearchpy/helpers/response/__init__.pyi | 1 + opensearchpy/helpers/response/aggs.py | 1 + opensearchpy/helpers/response/aggs.pyi | 1 + opensearchpy/helpers/response/hit.py | 1 + opensearchpy/helpers/response/hit.pyi | 1 + opensearchpy/helpers/search.py | 1 + opensearchpy/helpers/search.pyi | 1 + opensearchpy/helpers/signer.py | 1 + opensearchpy/helpers/test.py | 1 + opensearchpy/helpers/test.pyi | 1 + opensearchpy/helpers/update_by_query.py | 1 + opensearchpy/helpers/update_by_query.pyi | 1 + opensearchpy/helpers/utils.py | 1 + opensearchpy/helpers/utils.pyi | 1 + opensearchpy/helpers/wrappers.py | 1 + opensearchpy/helpers/wrappers.pyi | 1 + opensearchpy/plugins/__init__.py | 1 + opensearchpy/plugins/__init__.pyi | 1 + opensearchpy/plugins/alerting.py | 1 + opensearchpy/plugins/alerting.pyi | 1 + opensearchpy/plugins/index_management.py | 1 + opensearchpy/plugins/index_management.pyi | 1 + opensearchpy/serializer.py | 1 + opensearchpy/serializer.pyi | 1 + opensearchpy/transport.py | 1 + opensearchpy/transport.pyi | 1 + samples/aws/search-requests.py | 1 + samples/aws/search-urllib3.py | 1 + samples/bulk/bulk-array.py | 1 + samples/bulk/bulk-helpers.py | 1 + samples/bulk/bulk-ld.py | 1 + samples/hello/hello-async.py | 1 + samples/hello/hello.py | 1 + samples/json/hello-async.py | 1 + samples/json/hello.py | 1 + samples/knn/knn-async-basics.py | 1 + samples/knn/knn-basics.py | 1 + samples/knn/knn-boolean-filter.py | 1 + samples/knn/knn-efficient-filter.py | 1 + samples/security/roles.py | 1 + samples/security/users.py | 1 + test_opensearchpy/TestHttpServer.py | 1 + test_opensearchpy/__init__.py | 1 + test_opensearchpy/run_tests.py | 1 + test_opensearchpy/test_async/__init__.py | 1 + test_opensearchpy/test_async/test_helpers/__init__.py | 1 + test_opensearchpy/test_async/test_helpers/conftest.py | 1 + test_opensearchpy/test_async/test_helpers/test_document.py | 1 + test_opensearchpy/test_async/test_helpers/test_faceted_search.py | 1 + test_opensearchpy/test_async/test_helpers/test_index.py | 1 + test_opensearchpy/test_async/test_helpers/test_mapping.py | 1 + test_opensearchpy/test_async/test_helpers/test_search.py | 1 + .../test_async/test_helpers/test_update_by_query.py | 1 + test_opensearchpy/test_async/test_http_connection.py | 1 + test_opensearchpy/test_async/test_server/__init__.py | 1 + test_opensearchpy/test_async/test_server/conftest.py | 1 + .../test_async/test_server/test_helpers/__init__.py | 1 + .../test_async/test_server/test_helpers/conftest.py | 1 + .../test_async/test_server/test_helpers/test_actions.py | 1 + .../test_async/test_server/test_helpers/test_data.py | 1 + .../test_async/test_server/test_helpers/test_document.py | 1 + .../test_async/test_server/test_helpers/test_faceted_search.py | 1 + .../test_async/test_server/test_helpers/test_index.py | 1 + .../test_async/test_server/test_helpers/test_mapping.py | 1 + .../test_async/test_server/test_helpers/test_search.py | 1 + .../test_async/test_server/test_helpers/test_update_by_query.py | 1 + .../test_async/test_server/test_plugins/__init__.py | 1 + 
test_opensearchpy/test_async/test_server/test_rest_api_spec.py | 1 + test_opensearchpy/test_async/test_server_secured/__init__.py | 1 + test_opensearchpy/test_cases.py | 1 + test_opensearchpy/test_client/__init__.py | 1 + test_opensearchpy/test_client/test_cluster.py | 1 + test_opensearchpy/test_client/test_indices.py | 1 + test_opensearchpy/test_client/test_plugins/__init__.py | 1 + test_opensearchpy/test_client/test_plugins/test_alerting.py | 1 + .../test_client/test_plugins/test_index_management.py | 1 + test_opensearchpy/test_client/test_point_in_time.py | 1 + test_opensearchpy/test_client/test_remote_store.py | 1 + test_opensearchpy/test_client/test_requests.py | 1 + test_opensearchpy/test_client/test_urllib3.py | 1 + test_opensearchpy/test_connection/__init__.py | 1 + test_opensearchpy/test_connection_pool.py | 1 + test_opensearchpy/test_exceptions.py | 1 + test_opensearchpy/test_helpers/__init__.py | 1 + test_opensearchpy/test_helpers/test_aggs.py | 1 + test_opensearchpy/test_helpers/test_document.py | 1 + test_opensearchpy/test_helpers/test_faceted_search.py | 1 + test_opensearchpy/test_helpers/test_field.py | 1 + test_opensearchpy/test_helpers/test_index.py | 1 + test_opensearchpy/test_helpers/test_mapping.py | 1 + test_opensearchpy/test_helpers/test_query.py | 1 + test_opensearchpy/test_helpers/test_result.py | 1 + test_opensearchpy/test_helpers/test_search.py | 1 + test_opensearchpy/test_helpers/test_update_by_query.py | 1 + test_opensearchpy/test_helpers/test_utils.py | 1 + test_opensearchpy/test_helpers/test_validation.py | 1 + test_opensearchpy/test_helpers/test_wrappers.py | 1 + test_opensearchpy/test_server/__init__.py | 1 + test_opensearchpy/test_server/conftest.py | 1 + test_opensearchpy/test_server/test_helpers/__init__.py | 1 + test_opensearchpy/test_server/test_helpers/test_actions.py | 1 + test_opensearchpy/test_server/test_helpers/test_analysis.py | 1 + test_opensearchpy/test_server/test_helpers/test_count.py | 1 + test_opensearchpy/test_server/test_helpers/test_data.py | 1 + test_opensearchpy/test_server/test_helpers/test_document.py | 1 + .../test_server/test_helpers/test_faceted_search.py | 1 + test_opensearchpy/test_server/test_helpers/test_index.py | 1 + test_opensearchpy/test_server/test_helpers/test_mapping.py | 1 + .../test_server/test_helpers/test_update_by_query.py | 1 + test_opensearchpy/test_server/test_plugins/__init__.py | 1 + test_opensearchpy/test_server/test_rest_api_spec.py | 1 + test_opensearchpy/test_server_secured/__init__.py | 1 + test_opensearchpy/test_types/aliased_types.py | 1 + test_opensearchpy/test_types/async_types.py | 1 + test_opensearchpy/test_types/sync_types.py | 1 + test_opensearchpy/utils.py | 1 + utils/build-dists.py | 1 + utils/generate-api.py | 1 + 268 files changed, 268 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a9955a0..02826288 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added support for AWS SigV4 for urllib3 ([#547](https://github.com/opensearch-project/opensearch-py/pull/547)) - Added `remote store` client APIs ([#552](https://github.com/opensearch-project/opensearch-py/pull/552)) - Added `nox -rs generate` ([#554](https://github.com/opensearch-project/opensearch-py/pull/554)) +- Added a utf-8 header to all .py files ([#557](https://github.com/opensearch-project/opensearch-py/pull/557)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - 
Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index d08ca634..015801f4 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index 03e6f998..618954d7 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index f20ca9f0..146974f1 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/bench_sync_async.py b/benchmarks/bench_sync_async.py index 5fa97f46..8c43e278 100644 --- a/benchmarks/bench_sync_async.py +++ b/benchmarks/bench_sync_async.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/thread_with_return_value.py b/benchmarks/thread_with_return_value.py index fb495656..46fefe1f 100644 --- a/benchmarks/thread_with_return_value.py +++ b/benchmarks/thread_with_return_value.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/noxfile.py b/noxfile.py index ba2fe7b9..04374cd4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index a0ea9f60..8116d60a 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/__init__.pyi b/opensearchpy/__init__.pyi index 0fa4afcf..96c17075 100644 --- a/opensearchpy/__init__.pyi +++ b/opensearchpy/__init__.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/__init__.py b/opensearchpy/_async/__init__.py index 7e52ae22..392fa5bd 100644 --- a/opensearchpy/_async/__init__.py +++ b/opensearchpy/_async/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/_extra_imports.py b/opensearchpy/_async/_extra_imports.py index 5fd19461..e19a11a9 100644 --- a/opensearchpy/_async/_extra_imports.py +++ b/opensearchpy/_async/_extra_imports.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/_patch.py b/opensearchpy/_async/client/_patch.py index b1b00942..f3a953c0 100644 --- a/opensearchpy/_async/client/_patch.py +++ 
b/opensearchpy/_async/client/_patch.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/_patch.pyi b/opensearchpy/_async/client/_patch.pyi index 1912c180..d49a7fec 100644 --- a/opensearchpy/_async/client/_patch.pyi +++ b/opensearchpy/_async/client/_patch.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index d2864097..2c2b01c0 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/cat.pyi b/opensearchpy/_async/client/cat.pyi index 435403e9..404400cd 100644 --- a/opensearchpy/_async/client/cat.pyi +++ b/opensearchpy/_async/client/cat.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 7c9c5f46..8bd55390 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/cluster.pyi b/opensearchpy/_async/client/cluster.pyi index b75ec46c..74f88694 100644 --- a/opensearchpy/_async/client/cluster.pyi +++ b/opensearchpy/_async/client/cluster.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index b284ac27..bc886d65 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/dangling_indices.pyi b/opensearchpy/_async/client/dangling_indices.pyi index 17ab1ac8..d9dea8a1 100644 --- a/opensearchpy/_async/client/dangling_indices.pyi +++ b/opensearchpy/_async/client/dangling_indices.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/features.py b/opensearchpy/_async/client/features.py index 7922f955..e2c1bb7d 100644 --- a/opensearchpy/_async/client/features.py +++ b/opensearchpy/_async/client/features.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/features.pyi b/opensearchpy/_async/client/features.pyi index 96acb588..38fb992e 100644 --- a/opensearchpy/_async/client/features.pyi +++ b/opensearchpy/_async/client/features.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index cfc48db4..b83cb73c 100644 --- 
a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/indices.pyi b/opensearchpy/_async/client/indices.pyi index 0d9b5953..1a5c0912 100644 --- a/opensearchpy/_async/client/indices.pyi +++ b/opensearchpy/_async/client/indices.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index eab27980..0d56f7e1 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/ingest.pyi b/opensearchpy/_async/client/ingest.pyi index 40d3c7d9..9dd4fc2b 100644 --- a/opensearchpy/_async/client/ingest.pyi +++ b/opensearchpy/_async/client/ingest.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index 31cd4915..a89fee94 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/nodes.pyi b/opensearchpy/_async/client/nodes.pyi index b34a7ba9..c18afb83 100644 --- a/opensearchpy/_async/client/nodes.pyi +++ b/opensearchpy/_async/client/nodes.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/plugins.py b/opensearchpy/_async/client/plugins.py index b39576c1..19570be4 100644 --- a/opensearchpy/_async/client/plugins.py +++ b/opensearchpy/_async/client/plugins.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/plugins.pyi b/opensearchpy/_async/client/plugins.pyi index 88383d01..44576c74 100644 --- a/opensearchpy/_async/client/plugins.pyi +++ b/opensearchpy/_async/client/plugins.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/remote.py b/opensearchpy/_async/client/remote.py index 02aa931d..eee7319d 100644 --- a/opensearchpy/_async/client/remote.py +++ b/opensearchpy/_async/client/remote.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/remote.pyi b/opensearchpy/_async/client/remote.pyi index 068c690b..a2d7dc51 100644 --- a/opensearchpy/_async/client/remote.pyi +++ b/opensearchpy/_async/client/remote.pyi @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index 457369ac..e59d1870 100644 --- 
--- a/opensearchpy/_async/client/remote_store.py
+++ b/opensearchpy/_async/client/remote_store.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/remote_store.pyi b/opensearchpy/_async/client/remote_store.pyi
index 8ea2d077..b14866ef 100644
--- a/opensearchpy/_async/client/remote_store.pyi
+++ b/opensearchpy/_async/client/remote_store.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py
index 49e658d9..43265506 100644
--- a/opensearchpy/_async/client/security.py
+++ b/opensearchpy/_async/client/security.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/security.pyi b/opensearchpy/_async/client/security.pyi
index 182d06c4..b3010b3b 100644
--- a/opensearchpy/_async/client/security.pyi
+++ b/opensearchpy/_async/client/security.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py
index 37f46820..f9960b64 100644
--- a/opensearchpy/_async/client/snapshot.py
+++ b/opensearchpy/_async/client/snapshot.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/snapshot.pyi b/opensearchpy/_async/client/snapshot.pyi
index b065e86b..b219a323 100644
--- a/opensearchpy/_async/client/snapshot.pyi
+++ b/opensearchpy/_async/client/snapshot.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py
index 2bf73c6d..7efce482 100644
--- a/opensearchpy/_async/client/tasks.py
+++ b/opensearchpy/_async/client/tasks.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/tasks.pyi b/opensearchpy/_async/client/tasks.pyi
index 14081a2d..f3cf05d0 100644
--- a/opensearchpy/_async/client/tasks.pyi
+++ b/opensearchpy/_async/client/tasks.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/utils.py b/opensearchpy/_async/client/utils.py
index b9ea1894..59bedb8e 100644
--- a/opensearchpy/_async/client/utils.py
+++ b/opensearchpy/_async/client/utils.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/client/utils.pyi b/opensearchpy/_async/client/utils.pyi
index bf88f587..e175d5e2 100644
--- a/opensearchpy/_async/client/utils.pyi
+++ b/opensearchpy/_async/client/utils.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/compat.py b/opensearchpy/_async/compat.py
index d9c411d4..66c2eca8 100644
--- a/opensearchpy/_async/compat.py
+++ b/opensearchpy/_async/compat.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/compat.pyi b/opensearchpy/_async/compat.pyi
index 60b54b86..290396de 100644
--- a/opensearchpy/_async/compat.pyi
+++ b/opensearchpy/_async/compat.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/__init__.py b/opensearchpy/_async/helpers/__init__.py
index 6c0097cd..22c54ac8 100644
--- a/opensearchpy/_async/helpers/__init__.py
+++ b/opensearchpy/_async/helpers/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py
index 323a6668..1f49220f 100644
--- a/opensearchpy/_async/helpers/actions.py
+++ b/opensearchpy/_async/helpers/actions.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/actions.pyi b/opensearchpy/_async/helpers/actions.pyi
index cd6b6974..20cc0661 100644
--- a/opensearchpy/_async/helpers/actions.pyi
+++ b/opensearchpy/_async/helpers/actions.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py
index 7f796a86..e71bef46 100644
--- a/opensearchpy/_async/helpers/document.py
+++ b/opensearchpy/_async/helpers/document.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/document.pyi b/opensearchpy/_async/helpers/document.pyi
index 71eb4ef4..f39d5471 100644
--- a/opensearchpy/_async/helpers/document.pyi
+++ b/opensearchpy/_async/helpers/document.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/faceted_search.py b/opensearchpy/_async/helpers/faceted_search.py
index c6ca4385..86f22e00 100644
--- a/opensearchpy/_async/helpers/faceted_search.py
+++ b/opensearchpy/_async/helpers/faceted_search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/faceted_search.pyi b/opensearchpy/_async/helpers/faceted_search.pyi
index 443e87c5..0e79f1f6 100644
--- a/opensearchpy/_async/helpers/faceted_search.pyi
+++ b/opensearchpy/_async/helpers/faceted_search.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py
index c3e5a371..51082dc6 100644
--- a/opensearchpy/_async/helpers/index.py
+++ b/opensearchpy/_async/helpers/index.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/index.pyi b/opensearchpy/_async/helpers/index.pyi
index 5b9d8720..6a89f0d1 100644
--- a/opensearchpy/_async/helpers/index.pyi
+++ b/opensearchpy/_async/helpers/index.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py
index 1ccec472..967c74c8 100644
--- a/opensearchpy/_async/helpers/mapping.py
+++ b/opensearchpy/_async/helpers/mapping.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/mapping.pyi b/opensearchpy/_async/helpers/mapping.pyi
index 61505f42..91b8d64b 100644
--- a/opensearchpy/_async/helpers/mapping.pyi
+++ b/opensearchpy/_async/helpers/mapping.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/search.py b/opensearchpy/_async/helpers/search.py
index bd6884cf..73c52971 100644
--- a/opensearchpy/_async/helpers/search.py
+++ b/opensearchpy/_async/helpers/search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/search.pyi b/opensearchpy/_async/helpers/search.pyi
index 4fb1cd3c..3413c889 100644
--- a/opensearchpy/_async/helpers/search.pyi
+++ b/opensearchpy/_async/helpers/search.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/test.py b/opensearchpy/_async/helpers/test.py
index c8e43273..895ae991 100644
--- a/opensearchpy/_async/helpers/test.py
+++ b/opensearchpy/_async/helpers/test.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/test.pyi b/opensearchpy/_async/helpers/test.pyi
index 451bfc14..497d8caf 100644
--- a/opensearchpy/_async/helpers/test.pyi
+++ b/opensearchpy/_async/helpers/test.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/update_by_query.py b/opensearchpy/_async/helpers/update_by_query.py
index 322b1488..fc9eef54 100644
--- a/opensearchpy/_async/helpers/update_by_query.py
+++ b/opensearchpy/_async/helpers/update_by_query.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/helpers/update_by_query.pyi b/opensearchpy/_async/helpers/update_by_query.pyi
index 3c5a9ed7..57d692c6 100644
--- a/opensearchpy/_async/helpers/update_by_query.pyi
+++ b/opensearchpy/_async/helpers/update_by_query.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py
index cc426164..cab7782e 100644
--- a/opensearchpy/_async/http_aiohttp.py
+++ b/opensearchpy/_async/http_aiohttp.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/http_aiohttp.pyi b/opensearchpy/_async/http_aiohttp.pyi
index 223fdfff..d641a5eb 100644
--- a/opensearchpy/_async/http_aiohttp.pyi
+++ b/opensearchpy/_async/http_aiohttp.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/plugins/__init__.py b/opensearchpy/_async/plugins/__init__.py
index 6c0097cd..22c54ac8 100644
--- a/opensearchpy/_async/plugins/__init__.py
+++ b/opensearchpy/_async/plugins/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/plugins/__init__.pyi b/opensearchpy/_async/plugins/__init__.pyi
index 6c0097cd..22c54ac8 100644
--- a/opensearchpy/_async/plugins/__init__.pyi
+++ b/opensearchpy/_async/plugins/__init__.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/plugins/alerting.py b/opensearchpy/_async/plugins/alerting.py
index d8b27937..be79ed02 100644
--- a/opensearchpy/_async/plugins/alerting.py
+++ b/opensearchpy/_async/plugins/alerting.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/plugins/alerting.pyi b/opensearchpy/_async/plugins/alerting.pyi
index 50392224..7629df93 100644
--- a/opensearchpy/_async/plugins/alerting.pyi
+++ b/opensearchpy/_async/plugins/alerting.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/plugins/index_management.py b/opensearchpy/_async/plugins/index_management.py
index 3be06e6a..ea654bc2 100644
--- a/opensearchpy/_async/plugins/index_management.py
+++ b/opensearchpy/_async/plugins/index_management.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/plugins/index_management.pyi b/opensearchpy/_async/plugins/index_management.pyi
index cd08954d..98d50097 100644
--- a/opensearchpy/_async/plugins/index_management.pyi
+++ b/opensearchpy/_async/plugins/index_management.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py
index 3db4516c..bf1b77d6 100644
--- a/opensearchpy/_async/transport.py
+++ b/opensearchpy/_async/transport.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_async/transport.pyi b/opensearchpy/_async/transport.pyi
index cc9406bf..5d66514d 100644
--- a/opensearchpy/_async/transport.pyi
+++ b/opensearchpy/_async/transport.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py
index 82fac929..2410b9f5 100644
--- a/opensearchpy/_version.py
+++ b/opensearchpy/_version.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/_patch.py b/opensearchpy/client/_patch.py
index d92eae5a..bbb69d52 100644
--- a/opensearchpy/client/_patch.py
+++ b/opensearchpy/client/_patch.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/_patch.pyi b/opensearchpy/client/_patch.pyi
index b01423b9..b1819682 100644
--- a/opensearchpy/client/_patch.pyi
+++ b/opensearchpy/client/_patch.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py
index cd0c42cf..49d797ca 100644
--- a/opensearchpy/client/cat.py
+++ b/opensearchpy/client/cat.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/cat.pyi b/opensearchpy/client/cat.pyi
index fc076ef8..0d690dda 100644
--- a/opensearchpy/client/cat.pyi
+++ b/opensearchpy/client/cat.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py
index f2276261..248c7ce3 100644
--- a/opensearchpy/client/cluster.py
+++ b/opensearchpy/client/cluster.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/cluster.pyi b/opensearchpy/client/cluster.pyi
index 7ea5016f..ad2d3fac 100644
--- a/opensearchpy/client/cluster.pyi
+++ b/opensearchpy/client/cluster.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py
index 7cb3ea34..adc4aea3 100644
--- a/opensearchpy/client/dangling_indices.py
+++ b/opensearchpy/client/dangling_indices.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/dangling_indices.pyi b/opensearchpy/client/dangling_indices.pyi
index 203805a1..b48ba830 100644
--- a/opensearchpy/client/dangling_indices.pyi
+++ b/opensearchpy/client/dangling_indices.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/features.py b/opensearchpy/client/features.py
index a9e6ab95..b96ea308 100644
--- a/opensearchpy/client/features.py
+++ b/opensearchpy/client/features.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/features.pyi b/opensearchpy/client/features.pyi
index 8da34e42..6abcd79e 100644
--- a/opensearchpy/client/features.pyi
+++ b/opensearchpy/client/features.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py
index 9db06dd6..3f8df6c6 100644
--- a/opensearchpy/client/indices.py
+++ b/opensearchpy/client/indices.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/indices.pyi b/opensearchpy/client/indices.pyi
index 51751d53..87048693 100644
--- a/opensearchpy/client/indices.pyi
+++ b/opensearchpy/client/indices.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py
index a14bc0f6..6282c7b8 100644
--- a/opensearchpy/client/ingest.py
+++ b/opensearchpy/client/ingest.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/ingest.pyi b/opensearchpy/client/ingest.pyi
index 251071e3..c7531f0e 100644
--- a/opensearchpy/client/ingest.pyi
+++ b/opensearchpy/client/ingest.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py
index 6b73177b..28ea1357 100644
--- a/opensearchpy/client/nodes.py
+++ b/opensearchpy/client/nodes.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/nodes.pyi b/opensearchpy/client/nodes.pyi
index 67e5a05c..78465481 100644
--- a/opensearchpy/client/nodes.pyi
+++ b/opensearchpy/client/nodes.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/plugins.py b/opensearchpy/client/plugins.py
index b39576c1..19570be4 100644
--- a/opensearchpy/client/plugins.py
+++ b/opensearchpy/client/plugins.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/plugins.pyi b/opensearchpy/client/plugins.pyi
index 2e4b2630..da9a7488 100644
--- a/opensearchpy/client/plugins.pyi
+++ b/opensearchpy/client/plugins.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/remote.py b/opensearchpy/client/remote.py
index 3f483697..eba66927 100644
--- a/opensearchpy/client/remote.py
+++ b/opensearchpy/client/remote.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/remote.pyi b/opensearchpy/client/remote.pyi
index 949301a7..93e8c067 100644
--- a/opensearchpy/client/remote.pyi
+++ b/opensearchpy/client/remote.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py
index 38d8a1c6..8f4313b7 100644
--- a/opensearchpy/client/remote_store.py
+++ b/opensearchpy/client/remote_store.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/remote_store.pyi b/opensearchpy/client/remote_store.pyi
index 99928f3b..50358e63 100644
--- a/opensearchpy/client/remote_store.pyi
+++ b/opensearchpy/client/remote_store.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py
index 1af50448..14bc0229 100644
--- a/opensearchpy/client/security.py
+++ b/opensearchpy/client/security.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/security.pyi b/opensearchpy/client/security.pyi
index dbcc1179..99e009d9 100644
--- a/opensearchpy/client/security.pyi
+++ b/opensearchpy/client/security.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py
index ac0683a5..313f7dd3 100644
--- a/opensearchpy/client/snapshot.py
+++ b/opensearchpy/client/snapshot.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/snapshot.pyi b/opensearchpy/client/snapshot.pyi
index 8d50f086..fd239fad 100644
--- a/opensearchpy/client/snapshot.pyi
+++ b/opensearchpy/client/snapshot.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py
index 0cc7a4b2..90c4e731 100644
--- a/opensearchpy/client/tasks.py
+++ b/opensearchpy/client/tasks.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/tasks.pyi b/opensearchpy/client/tasks.pyi
index 50ad69bb..0aeed153 100644
--- a/opensearchpy/client/tasks.pyi
+++ b/opensearchpy/client/tasks.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py
index aaa07995..7b7366de 100644
--- a/opensearchpy/client/utils.py
+++ b/opensearchpy/client/utils.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/client/utils.pyi b/opensearchpy/client/utils.pyi
index 4924fed9..2aa263fc 100644
--- a/opensearchpy/client/utils.pyi
+++ b/opensearchpy/client/utils.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py
index 4f74c740..57a88a74 100644
--- a/opensearchpy/compat.py
+++ b/opensearchpy/compat.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/compat.pyi b/opensearchpy/compat.pyi
index c9607668..2606c723 100644
--- a/opensearchpy/compat.pyi
+++ b/opensearchpy/compat.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/__init__.py b/opensearchpy/connection/__init__.py
index 6e331a54..40037859 100644
--- a/opensearchpy/connection/__init__.py
+++ b/opensearchpy/connection/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/__init__.pyi b/opensearchpy/connection/__init__.pyi
index ad1d9e62..f3f31016 100644
--- a/opensearchpy/connection/__init__.pyi
+++ b/opensearchpy/connection/__init__.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py
index acaa0b68..87dd22d7 100644
--- a/opensearchpy/connection/async_connections.py
+++ b/opensearchpy/connection/async_connections.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/async_connections.pyi b/opensearchpy/connection/async_connections.pyi
index 8935ec6b..eb310cdf 100644
--- a/opensearchpy/connection/async_connections.pyi
+++ b/opensearchpy/connection/async_connections.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py
index 05edca73..ee8d934f 100644
--- a/opensearchpy/connection/base.py
+++ b/opensearchpy/connection/base.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/base.pyi b/opensearchpy/connection/base.pyi
index 7e51d20c..333f4a70 100644
--- a/opensearchpy/connection/base.pyi
+++ b/opensearchpy/connection/base.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py
index 857cba3a..4401ade0 100644
--- a/opensearchpy/connection/connections.py
+++ b/opensearchpy/connection/connections.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/connections.pyi b/opensearchpy/connection/connections.pyi
index 07814ba4..d763f57c 100644
--- a/opensearchpy/connection/connections.pyi
+++ b/opensearchpy/connection/connections.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/http_async.py b/opensearchpy/connection/http_async.py
index 10f5a56a..b7288005 100644
--- a/opensearchpy/connection/http_async.py
+++ b/opensearchpy/connection/http_async.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/http_async.pyi b/opensearchpy/connection/http_async.pyi
index adde809b..9fcfb246 100644
--- a/opensearchpy/connection/http_async.pyi
+++ b/opensearchpy/connection/http_async.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/http_requests.py b/opensearchpy/connection/http_requests.py
index e0b6d143..f9e9b1a1 100644
--- a/opensearchpy/connection/http_requests.py
+++ b/opensearchpy/connection/http_requests.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/http_requests.pyi b/opensearchpy/connection/http_requests.pyi
index c9bb5617..61b6d496 100644
--- a/opensearchpy/connection/http_requests.pyi
+++ b/opensearchpy/connection/http_requests.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py
index 0042cc3c..bde689ae 100644
--- a/opensearchpy/connection/http_urllib3.py
+++ b/opensearchpy/connection/http_urllib3.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/http_urllib3.pyi b/opensearchpy/connection/http_urllib3.pyi
index 83d62117..7fe27617 100644
--- a/opensearchpy/connection/http_urllib3.pyi
+++ b/opensearchpy/connection/http_urllib3.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/pooling.py b/opensearchpy/connection/pooling.py
index bd9fe5f9..48503a58 100644
--- a/opensearchpy/connection/pooling.py
+++ b/opensearchpy/connection/pooling.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection/pooling.pyi b/opensearchpy/connection/pooling.pyi
index b32fd068..53e38f40 100644
--- a/opensearchpy/connection/pooling.pyi
+++ b/opensearchpy/connection/pooling.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py
index 61873748..7ff15512 100644
--- a/opensearchpy/connection_pool.py
+++ b/opensearchpy/connection_pool.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/connection_pool.pyi b/opensearchpy/connection_pool.pyi
index 7a528cf4..e219591c 100644
--- a/opensearchpy/connection_pool.pyi
+++ b/opensearchpy/connection_pool.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/exceptions.py b/opensearchpy/exceptions.py
index cc35c91f..f2f994ca 100644
--- a/opensearchpy/exceptions.py
+++ b/opensearchpy/exceptions.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/exceptions.pyi b/opensearchpy/exceptions.pyi
index 8adafdd8..0ecacc6f 100644
--- a/opensearchpy/exceptions.pyi
+++ b/opensearchpy/exceptions.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/__init__.py b/opensearchpy/helpers/__init__.py
index 8057de7e..7116dc48 100644
--- a/opensearchpy/helpers/__init__.py
+++ b/opensearchpy/helpers/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/__init__.pyi b/opensearchpy/helpers/__init__.pyi
index 01d4973c..24c0d13d 100644
--- a/opensearchpy/helpers/__init__.pyi
+++ b/opensearchpy/helpers/__init__.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py
index e565256f..587444a3 100644
--- a/opensearchpy/helpers/actions.py
+++ b/opensearchpy/helpers/actions.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/actions.pyi b/opensearchpy/helpers/actions.pyi
index 4fee4bd1..e1ee4254 100644
--- a/opensearchpy/helpers/actions.pyi
+++ b/opensearchpy/helpers/actions.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py
index 5a7f800c..db7d2c28 100644
--- a/opensearchpy/helpers/aggs.py
+++ b/opensearchpy/helpers/aggs.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/aggs.pyi b/opensearchpy/helpers/aggs.pyi
index e3f6e93c..08b74a3a 100644
--- a/opensearchpy/helpers/aggs.pyi
+++ b/opensearchpy/helpers/aggs.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py
index 251b004a..4e2646d7 100644
--- a/opensearchpy/helpers/analysis.py
+++ b/opensearchpy/helpers/analysis.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/analysis.pyi b/opensearchpy/helpers/analysis.pyi
index b4f37af5..364a6ea5 100644
--- a/opensearchpy/helpers/analysis.pyi
+++ b/opensearchpy/helpers/analysis.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/asyncsigner.py b/opensearchpy/helpers/asyncsigner.py
index e21007d3..7f063c9f 100644
--- a/opensearchpy/helpers/asyncsigner.py
+++ b/opensearchpy/helpers/asyncsigner.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/asyncsigner.pyi b/opensearchpy/helpers/asyncsigner.pyi
index 2c701bb9..e0b5a7b5 100644
--- a/opensearchpy/helpers/asyncsigner.pyi
+++ b/opensearchpy/helpers/asyncsigner.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py
index 7d45275a..de9891bc 100644
--- a/opensearchpy/helpers/document.py
+++ b/opensearchpy/helpers/document.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/document.pyi b/opensearchpy/helpers/document.pyi
index d740b931..89ca6426 100644
--- a/opensearchpy/helpers/document.pyi
+++ b/opensearchpy/helpers/document.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/errors.py b/opensearchpy/helpers/errors.py
index dc9e62da..5d05bd23 100644
--- a/opensearchpy/helpers/errors.py
+++ b/opensearchpy/helpers/errors.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/errors.pyi b/opensearchpy/helpers/errors.pyi
index bed92df7..9572d68f 100644
--- a/opensearchpy/helpers/errors.pyi
+++ b/opensearchpy/helpers/errors.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py
index 6da84dc2..e1bf9c0e 100644
--- a/opensearchpy/helpers/faceted_search.py
+++ b/opensearchpy/helpers/faceted_search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/faceted_search.pyi b/opensearchpy/helpers/faceted_search.pyi
index 3f1d175b..d3ff998d 100644
--- a/opensearchpy/helpers/faceted_search.pyi
+++ b/opensearchpy/helpers/faceted_search.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/field.py b/opensearchpy/helpers/field.py
index 756a3a0e..edeaecf0 100644
--- a/opensearchpy/helpers/field.py
+++ b/opensearchpy/helpers/field.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/field.pyi b/opensearchpy/helpers/field.pyi
index 3704aa81..3b448641 100644
--- a/opensearchpy/helpers/field.pyi
+++ b/opensearchpy/helpers/field.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py
index 5b8db7b0..47b7b148 100644
--- a/opensearchpy/helpers/function.py
+++ b/opensearchpy/helpers/function.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/function.pyi b/opensearchpy/helpers/function.pyi
index 58a00fba..72b4b342 100644
--- a/opensearchpy/helpers/function.pyi
+++ b/opensearchpy/helpers/function.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py
index d6e08b50..6bbc23e8 100644
--- a/opensearchpy/helpers/index.py
+++ b/opensearchpy/helpers/index.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/index.pyi b/opensearchpy/helpers/index.pyi
index 2bf5747e..e2f95797 100644
--- a/opensearchpy/helpers/index.pyi
+++ b/opensearchpy/helpers/index.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py
index 9270da97..8fd37348 100644
--- a/opensearchpy/helpers/mapping.py
+++ b/opensearchpy/helpers/mapping.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/mapping.pyi b/opensearchpy/helpers/mapping.pyi
index 8dab731a..99a82935 100644
--- a/opensearchpy/helpers/mapping.pyi
+++ b/opensearchpy/helpers/mapping.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py
index e132254b..784435d6 100644
--- a/opensearchpy/helpers/query.py
+++ b/opensearchpy/helpers/query.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/query.pyi b/opensearchpy/helpers/query.pyi
index a963ef05..673e83f9 100644
--- a/opensearchpy/helpers/query.pyi
+++ b/opensearchpy/helpers/query.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/response/__init__.py b/opensearchpy/helpers/response/__init__.py
index 91e4c044..d4792b11 100644
--- a/opensearchpy/helpers/response/__init__.py
+++ b/opensearchpy/helpers/response/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/response/__init__.pyi b/opensearchpy/helpers/response/__init__.pyi
index 3f3af097..f592e46a 100644
--- a/opensearchpy/helpers/response/__init__.pyi
+++ b/opensearchpy/helpers/response/__init__.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/response/aggs.py b/opensearchpy/helpers/response/aggs.py
index a5e2e22d..c8e7d5dd 100644
--- a/opensearchpy/helpers/response/aggs.py
+++ b/opensearchpy/helpers/response/aggs.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/response/aggs.pyi b/opensearchpy/helpers/response/aggs.pyi
index ba92e56b..d943dbdd 100644
--- a/opensearchpy/helpers/response/aggs.pyi
+++ b/opensearchpy/helpers/response/aggs.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/response/hit.py b/opensearchpy/helpers/response/hit.py
index cf70a821..8f6230aa 100644
--- a/opensearchpy/helpers/response/hit.py
+++ b/opensearchpy/helpers/response/hit.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/response/hit.pyi b/opensearchpy/helpers/response/hit.pyi
index ae3cdf00..7597832d 100644
--- a/opensearchpy/helpers/response/hit.pyi
+++ b/opensearchpy/helpers/response/hit.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py
index 0652b60a..30c59a92 100644
--- a/opensearchpy/helpers/search.py
+++ b/opensearchpy/helpers/search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/search.pyi b/opensearchpy/helpers/search.pyi
index 92b46243..49eecb0d 100644
--- a/opensearchpy/helpers/search.pyi
+++ b/opensearchpy/helpers/search.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py
index 436909e7..ad6e9c65 100644
--- a/opensearchpy/helpers/signer.py
+++ b/opensearchpy/helpers/signer.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/test.py b/opensearchpy/helpers/test.py
index 9338636a..e2467584 100644
--- a/opensearchpy/helpers/test.py
+++ b/opensearchpy/helpers/test.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/test.pyi b/opensearchpy/helpers/test.pyi
index 1363f821..a4d2302a 100644
--- a/opensearchpy/helpers/test.pyi
+++ b/opensearchpy/helpers/test.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/update_by_query.py b/opensearchpy/helpers/update_by_query.py
index 3be888bf..32c7b705 100644
--- a/opensearchpy/helpers/update_by_query.py
+++ b/opensearchpy/helpers/update_by_query.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/update_by_query.pyi b/opensearchpy/helpers/update_by_query.pyi
index 90597033..c0baf631 100644
--- a/opensearchpy/helpers/update_by_query.pyi
+++ b/opensearchpy/helpers/update_by_query.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py
index 04f2ee37..4cd9dad8 100644
--- a/opensearchpy/helpers/utils.py
+++ b/opensearchpy/helpers/utils.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/utils.pyi b/opensearchpy/helpers/utils.pyi
index 74783974..decb7382 100644
--- a/opensearchpy/helpers/utils.pyi
+++ b/opensearchpy/helpers/utils.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/wrappers.py b/opensearchpy/helpers/wrappers.py
index 19cf3dec..968909a6 100644
--- a/opensearchpy/helpers/wrappers.py
+++ b/opensearchpy/helpers/wrappers.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/helpers/wrappers.pyi b/opensearchpy/helpers/wrappers.pyi
index fc79c384..704159dc 100644
--- a/opensearchpy/helpers/wrappers.pyi
+++ b/opensearchpy/helpers/wrappers.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/plugins/__init__.py b/opensearchpy/plugins/__init__.py
index 2f42da79..b0a5fb09 100644
--- a/opensearchpy/plugins/__init__.py
+++ b/opensearchpy/plugins/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/plugins/__init__.pyi b/opensearchpy/plugins/__init__.pyi
index 6c0097cd..22c54ac8 100644
--- a/opensearchpy/plugins/__init__.pyi
+++ b/opensearchpy/plugins/__init__.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/plugins/alerting.py b/opensearchpy/plugins/alerting.py
index defbf326..66b759a4 100644
--- a/opensearchpy/plugins/alerting.py
+++ b/opensearchpy/plugins/alerting.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/plugins/alerting.pyi b/opensearchpy/plugins/alerting.pyi
index d712e762..4454bf3c 100644
--- a/opensearchpy/plugins/alerting.pyi
+++ b/opensearchpy/plugins/alerting.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/plugins/index_management.py b/opensearchpy/plugins/index_management.py
index 435ab8d4..9f4f5a6e 100644
--- a/opensearchpy/plugins/index_management.py
+++ b/opensearchpy/plugins/index_management.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/plugins/index_management.pyi b/opensearchpy/plugins/index_management.pyi
index 24a59dc9..d4a6dbad 100644
--- a/opensearchpy/plugins/index_management.pyi
+++ b/opensearchpy/plugins/index_management.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/serializer.py b/opensearchpy/serializer.py
index 295c4af0..7463dbcc 100644
--- a/opensearchpy/serializer.py
+++ b/opensearchpy/serializer.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/serializer.pyi b/opensearchpy/serializer.pyi
index c68f51ca..6d798cce 100644
--- a/opensearchpy/serializer.pyi
+++ b/opensearchpy/serializer.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py
index 301955df..c36178b2 100644
--- a/opensearchpy/transport.py
+++ b/opensearchpy/transport.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/opensearchpy/transport.pyi b/opensearchpy/transport.pyi
index dfdcedb8..fe33cfda 100644
--- a/opensearchpy/transport.pyi
+++ b/opensearchpy/transport.pyi
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/aws/search-requests.py b/samples/aws/search-requests.py
index 1f14f55e..885c4693 100644
--- a/samples/aws/search-requests.py
+++ b/samples/aws/search-requests.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/aws/search-urllib3.py b/samples/aws/search-urllib3.py
index 46d6a89f..8fccfe9f 100644
--- a/samples/aws/search-urllib3.py
+++ b/samples/aws/search-urllib3.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/bulk/bulk-array.py b/samples/bulk/bulk-array.py
index 8df6fa63..dea7fae1 100755
--- a/samples/bulk/bulk-array.py
+++ b/samples/bulk/bulk-array.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/bulk/bulk-helpers.py b/samples/bulk/bulk-helpers.py
index 1210ee86..b6aff98f 100755
--- a/samples/bulk/bulk-helpers.py
+++ b/samples/bulk/bulk-helpers.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/bulk/bulk-ld.py b/samples/bulk/bulk-ld.py
index 5487c68f..299e16d3 100755
--- a/samples/bulk/bulk-ld.py
+++ b/samples/bulk/bulk-ld.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/hello/hello-async.py b/samples/hello/hello-async.py
index 572ef91c..cebe703d 100755
--- a/samples/hello/hello-async.py
+++ b/samples/hello/hello-async.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/hello/hello.py b/samples/hello/hello.py
index d72c2ab7..20363d14 100755
--- a/samples/hello/hello.py
+++ b/samples/hello/hello.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/json/hello-async.py b/samples/json/hello-async.py
index aa4840c4..4ce93dbe 100755
--- a/samples/json/hello-async.py
+++ b/samples/json/hello-async.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/json/hello.py b/samples/json/hello.py
index d5b8e70f..f0c81640 100755
--- a/samples/json/hello.py
+++ b/samples/json/hello.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/knn/knn-async-basics.py b/samples/knn/knn-async-basics.py
index c237aa46..f92acfe7 100755
--- a/samples/knn/knn-async-basics.py
+++ b/samples/knn/knn-async-basics.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/knn/knn-basics.py b/samples/knn/knn-basics.py
index 7868df7e..942118a8 100755
--- a/samples/knn/knn-basics.py
+++ b/samples/knn/knn-basics.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/knn/knn-boolean-filter.py b/samples/knn/knn-boolean-filter.py
index a99b1683..6a69b5f8 100755
--- a/samples/knn/knn-boolean-filter.py
+++ b/samples/knn/knn-boolean-filter.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/knn/knn-efficient-filter.py b/samples/knn/knn-efficient-filter.py
index 357eeb6a..569e4685 100755
--- a/samples/knn/knn-efficient-filter.py
+++ b/samples/knn/knn-efficient-filter.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/security/roles.py b/samples/security/roles.py
index a77d6eb0..0f0f2f61 100644
--- a/samples/security/roles.py
+++ b/samples/security/roles.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/samples/security/users.py b/samples/security/users.py
index b4bb8e3b..d33bd058 100644
--- a/samples/security/users.py
+++ b/samples/security/users.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/TestHttpServer.py b/test_opensearchpy/TestHttpServer.py
index e96670cc..d9fb8ede 100644
--- a/test_opensearchpy/TestHttpServer.py
+++ b/test_opensearchpy/TestHttpServer.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/__init__.py b/test_opensearchpy/__init__.py
index 7e52ae22..392fa5bd 100644
--- a/test_opensearchpy/__init__.py
+++ b/test_opensearchpy/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py
index ca9db82a..7c8eb9ca 100755
--- a/test_opensearchpy/run_tests.py
+++ b/test_opensearchpy/run_tests.py
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/__init__.py b/test_opensearchpy/test_async/__init__.py
index 7e52ae22..392fa5bd 100644
--- a/test_opensearchpy/test_async/__init__.py
+++ b/test_opensearchpy/test_async/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/__init__.py b/test_opensearchpy/test_async/test_helpers/__init__.py
index 7e52ae22..392fa5bd 100644
--- a/test_opensearchpy/test_async/test_helpers/__init__.py
+++ b/test_opensearchpy/test_async/test_helpers/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py
index 56a6bf31..ca0c8d4c 100644
--- a/test_opensearchpy/test_async/test_helpers/conftest.py
+++ b/test_opensearchpy/test_async/test_helpers/conftest.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py
index 44aaf1b5..26d49bf0 100644
--- a/test_opensearchpy/test_async/test_helpers/test_document.py
+++ b/test_opensearchpy/test_async/test_helpers/test_document.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py
index 88344cdb..34e18008 100644
--- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py
+++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py
index 4ba51ce2..1958f80f 100644
--- a/test_opensearchpy/test_async/test_helpers/test_index.py
+++ b/test_opensearchpy/test_async/test_helpers/test_index.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py
index a4fb2b24..7c9e799f 100644
--- a/test_opensearchpy/test_async/test_helpers/test_mapping.py
+++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py
index 5df66804..784193ee 100644
--- a/test_opensearchpy/test_async/test_helpers/test_search.py
+++ b/test_opensearchpy/test_async/test_helpers/test_search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py
index c535f15a..340bd1b7 100644
--- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py
+++ b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py
index a362f451..282a61c7 100644
--- a/test_opensearchpy/test_async/test_http_connection.py
+++ b/test_opensearchpy/test_async/test_http_connection.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py
index a2ab2657..794aeb53 100644
--- a/test_opensearchpy/test_async/test_server/__init__.py
+++ b/test_opensearchpy/test_async/test_server/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py
index 42c37edb..2c49aca3 100644
--- a/test_opensearchpy/test_async/test_server/conftest.py
+++ b/test_opensearchpy/test_async/test_server/conftest.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
a/test_opensearchpy/test_async/test_server/test_helpers/__init__.py b/test_opensearchpy/test_async/test_server/test_helpers/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/__init__.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index d5901d68..e6d79c46 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 7355c71d..425eb2c7 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index 1194304e..bc2df5ba 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 172dfbfc..650c7b39 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index ab8ae552..9f2d919b 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index 26f452ca..cc489052 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index c05fd0ec..1dca7959 100644 --- 
a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index 54889dc8..8eb202f7 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index 9c4e7fb6..2db68326 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index 0773aab0..bb8509dc 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server_secured/__init__.py b/test_opensearchpy/test_async/test_server_secured/__init__.py index 6c0097cd..22c54ac8 100644 --- a/test_opensearchpy/test_async/test_server_secured/__init__.py +++ b/test_opensearchpy/test_async/test_server_secured/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index c41b86a8..2a5ad5a3 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py index 0a5747ca..ecbd769a 100644 --- a/test_opensearchpy/test_client/__init__.py +++ b/test_opensearchpy/test_client/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_cluster.py b/test_opensearchpy/test_client/test_cluster.py index 15c43d5f..a66072cd 100644 --- a/test_opensearchpy/test_client/test_cluster.py +++ 
b/test_opensearchpy/test_client/test_cluster.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_indices.py b/test_opensearchpy/test_client/test_indices.py index d6737378..f3e48f1b 100644 --- a/test_opensearchpy/test_client/test_indices.py +++ b/test_opensearchpy/test_client/test_indices.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_plugins/__init__.py b/test_opensearchpy/test_client/test_plugins/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_client/test_plugins/__init__.py +++ b/test_opensearchpy/test_client/test_plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_plugins/test_alerting.py b/test_opensearchpy/test_client/test_plugins/test_alerting.py index 62827655..a59ad04e 100644 --- a/test_opensearchpy/test_client/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_client/test_plugins/test_alerting.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_plugins/test_index_management.py b/test_opensearchpy/test_client/test_plugins/test_index_management.py index 6b126038..2c744e19 100644 --- a/test_opensearchpy/test_client/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_client/test_plugins/test_index_management.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index e8546484..6ce12a46 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py index eb6187b4..92265733 100644 --- a/test_opensearchpy/test_client/test_remote_store.py +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py index 11434a17..3caf8d5f 100644 --- a/test_opensearchpy/test_client/test_requests.py +++ b/test_opensearchpy/test_client/test_requests.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py index 227164eb..fa63133b 100644 --- a/test_opensearchpy/test_client/test_urllib3.py +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_connection/__init__.py 
b/test_opensearchpy/test_connection/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_connection/__init__.py +++ b/test_opensearchpy/test_connection/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index 02686e44..f08b6f24 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_exceptions.py b/test_opensearchpy/test_exceptions.py index 77a97a91..0b4150fb 100644 --- a/test_opensearchpy/test_exceptions.py +++ b/test_opensearchpy/test_exceptions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/__init__.py b/test_opensearchpy/test_helpers/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_helpers/__init__.py +++ b/test_opensearchpy/test_helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 13059ccc..057e7f16 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index 086bde17..d2da16e0 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py index 066fc9d4..9fcc68d1 100644 --- a/test_opensearchpy/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_helpers/test_faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index 288eab3a..df30ad69 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index 40048bc6..7163c09e 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py index 822440a4..ad042c58 100644 --- a/test_opensearchpy/test_helpers/test_mapping.py +++ 
b/test_opensearchpy/test_helpers/test_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 46707f2c..95acfbe5 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index f07c633b..83fe8a08 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py index 91c7a709..dae61a00 100644 --- a/test_opensearchpy/test_helpers/test_search.py +++ b/test_opensearchpy/test_helpers/test_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py index d298a0a0..336f8fda 100644 --- a/test_opensearchpy/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_helpers/test_update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index 7a620736..c651fe2f 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index b86f8002..e8d9f5aa 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py index c05b9fc3..c49353c5 100644 --- a/test_opensearchpy/test_helpers/test_wrappers.py +++ b/test_opensearchpy/test_helpers/test_wrappers.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index 78d29958..164e6a5d 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/conftest.py b/test_opensearchpy/test_server/conftest.py index 03306fcf..128c33eb 100644 --- a/test_opensearchpy/test_server/conftest.py +++ b/test_opensearchpy/test_server/conftest.py @@ -1,3 +1,4 @@ +# -*- 
coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/__init__.py b/test_opensearchpy/test_server/test_helpers/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_server/test_helpers/__init__.py +++ b/test_opensearchpy/test_server/test_helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index 2230edb0..fcb65fde 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py index 9b4f5849..d0073c53 100644 --- a/test_opensearchpy/test_server/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_server/test_helpers/test_analysis.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py index f8aa612a..6a507a9f 100644 --- a/test_opensearchpy/test_server/test_helpers/test_count.py +++ b/test_opensearchpy/test_server/test_helpers/test_count.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index 059a983a..91e816b4 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index 1bb6ce12..f459afb2 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index 6b9ee50c..f7469d18 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py index 7df4a737..84525b01 100644 --- a/test_opensearchpy/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_server/test_helpers/test_index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch 
Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py index d5d84469..a9278159 100644 --- a/test_opensearchpy/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py index b22db642..81a75802 100644 --- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_plugins/__init__.py b/test_opensearchpy/test_server/test_plugins/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_server/test_plugins/__init__.py +++ b/test_opensearchpy/test_server/test_plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index 306993f2..e4c5cb3f 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server_secured/__init__.py b/test_opensearchpy/test_server_secured/__init__.py index 6c0097cd..22c54ac8 100644 --- a/test_opensearchpy/test_server_secured/__init__.py +++ b/test_opensearchpy/test_server_secured/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/aliased_types.py b/test_opensearchpy/test_types/aliased_types.py index f7a93e09..6d4a5a64 100644 --- a/test_opensearchpy/test_types/aliased_types.py +++ b/test_opensearchpy/test_types/aliased_types.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/async_types.py b/test_opensearchpy/test_types/async_types.py index b26b5d67..e6275662 100644 --- a/test_opensearchpy/test_types/async_types.py +++ b/test_opensearchpy/test_types/async_types.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/sync_types.py b/test_opensearchpy/test_types/sync_types.py index d772342b..df6634c4 100644 --- a/test_opensearchpy/test_types/sync_types.py +++ b/test_opensearchpy/test_types/sync_types.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py index 0c07a012..41497808 100644 --- a/test_opensearchpy/utils.py +++ b/test_opensearchpy/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # 
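# The patches above add the same short preamble to every module in the test
# tree. A minimal sketch of the resulting header, shown only for illustration
# (executable scripts with 100755 mode also keep their shebang, as in the
# earlier files in this series):
#
#     #!/usr/bin/env python
#     # -*- coding: utf-8 -*-
#     # SPDX-License-Identifier: Apache-2.0
#
# The added line is a PEP 263 encoding declaration. Python 3 already assumes
# UTF-8 source, so the marker is informational, but it keeps editors and older
# tooling from misreading non-ASCII literals in these files.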
# The OpenSearch Contributors require contributions made to diff --git a/utils/build-dists.py b/utils/build-dists.py index e6706c57..c52421e7 100644 --- a/utils/build-dists.py +++ b/utils/build-dists.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/utils/generate-api.py b/utils/generate-api.py index a4032765..049038a4 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to From 4357685a68317ba60c804657fe3404e875905c23 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 26 Oct 2023 22:55:50 -0400 Subject: [PATCH 39/80] Added samples, benchmarks and docs for nox format. (#556) * Added samples for nox format. Signed-off-by: dblock * Added space after #!/usr/bin/env python. Signed-off-by: dblock * Added benchmarks and docs. Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + benchmarks/bench_async.py | 5 +- benchmarks/bench_info_sync.py | 6 +- benchmarks/bench_sync.py | 8 +- benchmarks/bench_sync_async.py | 4 + benchmarks/thread_with_return_value.py | 6 +- docs/source/conf.py | 9 + noxfile.py | 3 + .../advanced_index_actions_sample.py | 92 +++++---- samples/aws/search-requests.py | 63 +++--- samples/aws/search-urllib3.py | 63 +++--- samples/bulk/bulk-array.py | 43 ++-- samples/bulk/bulk-helpers.py | 39 ++-- samples/bulk/bulk-ld.py | 46 ++--- samples/hello/hello-async.py | 136 ++++++------- samples/hello/hello.py | 73 +++---- .../index_template/index_template_sample.py | 190 ++++++++---------- samples/json/hello-async.py | 95 ++++----- samples/json/hello.py | 57 +++--- samples/knn/knn-async-basics.py | 68 +++---- samples/knn/knn-basics.py | 64 +++--- samples/knn/knn-boolean-filter.py | 86 ++++---- samples/knn/knn-efficient-filter.py | 171 ++++++++++------ samples/security/roles.py | 37 ++-- samples/security/users.py | 17 +- test_opensearchpy/run_tests.py | 6 +- utils/generate-api.py | 6 +- utils/license-headers.py | 67 +++--- 28 files changed, 740 insertions(+), 721 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 02826288..225d5c5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added `remote store` client APIs ([#552](https://github.com/opensearch-project/opensearch-py/pull/552)) - Added `nox -rs generate` ([#554](https://github.com/opensearch-project/opensearch-py/pull/554)) - Added a utf-8 header to all .py files ([#557](https://github.com/opensearch-project/opensearch-py/pull/557)) +- Added `samples`, `benchmarks` and `docs` to `nox -rs format` ([#556](https://github.com/opensearch-project/opensearch-py/pull/556)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index 015801f4..c7eb5714 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -6,6 +6,9 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. 
See +# GitHub history for details. import asyncio import uuid @@ -25,7 +28,7 @@ async def index_records(client, item_count): client.index( index=index_name, body={ - "title": f"Moneyball", + "title": "Moneyball", "director": "Bennett Miller", "year": "2011", }, diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index 618954d7..229a2e4d 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -6,6 +6,10 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import logging import sys @@ -36,7 +40,7 @@ def get_info(client, request_count): tt = 0 for n in range(request_count): start = time.time() * 1000 - rc = client.info() + client.info() total_time = time.time() * 1000 - start tt += total_time return tt diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index 146974f1..e201eaba 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -6,8 +6,13 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. import json +import logging +import sys import time import uuid @@ -21,9 +26,6 @@ index_name = "test-index-sync" item_count = 1000 -import logging -import sys - root = logging.getLogger() # root.setLevel(logging.DEBUG) # logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG) diff --git a/benchmarks/bench_sync_async.py b/benchmarks/bench_sync_async.py index 8c43e278..7950dc64 100644 --- a/benchmarks/bench_sync_async.py +++ b/benchmarks/bench_sync_async.py @@ -6,6 +6,10 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import bench_async import bench_sync diff --git a/benchmarks/thread_with_return_value.py b/benchmarks/thread_with_return_value.py index 46fefe1f..b6bc9c09 100644 --- a/benchmarks/thread_with_return_value.py +++ b/benchmarks/thread_with_return_value.py @@ -1,11 +1,13 @@ -#!/usr/bin/env python - # -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + from threading import Thread diff --git a/docs/source/conf.py b/docs/source/conf.py index ea677630..133a2564 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,3 +1,12 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + # Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. 
For a full diff --git a/noxfile.py b/noxfile.py index 04374cd4..a9cd9068 100644 --- a/noxfile.py +++ b/noxfile.py @@ -34,6 +34,9 @@ "opensearchpy/", "test_opensearchpy/", "utils/", + "samples/", + "benchmarks/", + "docs/", ) diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index 391d36b9..96d7d742 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -1,6 +1,17 @@ -from opensearchpy import OpenSearch +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. import time +from opensearchpy import OpenSearch # For cleaner output, comment in the two lines below to disable warnings and informational messages # import urllib3 @@ -10,73 +21,84 @@ def test_opensearch_examples(): # Set up client = OpenSearch( - hosts=['https://localhost:9200'], + hosts=["https://localhost:9200"], use_ssl=True, verify_certs=False, - http_auth=('admin', 'admin') + http_auth=("admin", "admin"), ) - client.indices.create(index='movies') + client.indices.create(index="movies") print("'movies' index created!") - + # Test Clear Index Cache - client.indices.clear_cache(index='movies') + client.indices.clear_cache(index="movies") print("Cache for 'movies' index cleared!") - client.indices.clear_cache(index='movies', query=True) + client.indices.clear_cache(index="movies", query=True) print("Query cache for 'movies' index cleared!") - client.indices.clear_cache(index='movies', fielddata=True, request=True) + client.indices.clear_cache(index="movies", fielddata=True, request=True) print("Field data and request cache for 'movies' index cleared!") - + # Test Flush Index - client.indices.flush(index='movies') + client.indices.flush(index="movies") print("'movies' index flushed!") - + # Test Refresh Index - client.indices.refresh(index='movies') + client.indices.refresh(index="movies") print("'movies' index refreshed!") - + # Test Close or Open Index - client.indices.close(index='movies') + client.indices.close(index="movies") print("'movies' index closed!") time.sleep(2) # add sleep to ensure the index has time to close - client.indices.open(index='movies') + client.indices.open(index="movies") print("'movies' index opened!") - + # Test Force Merge Index - client.indices.forcemerge(index='movies') + client.indices.forcemerge(index="movies") print("'movies' index force merged!") - + # Test Clone - client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': True}}}) + client.indices.put_settings( + index="movies", body={"index": {"blocks": {"write": True}}} + ) print("Write operations blocked for 'movies' index!") time.sleep(2) - client.indices.clone(index='movies', target='movies_clone') + client.indices.clone(index="movies", target="movies_clone") print("'movies' index cloned to 'movies_clone'!") - client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': False}}}) + client.indices.put_settings( + index="movies", body={"index": {"blocks": {"write": False}}} + ) print("Write operations enabled for 'movies' index!") - - # Test Split + + # Test Split client.indices.create( - index='books', - body={'settings': { - 'index': 
{'number_of_shards': 5, 'number_of_routing_shards': 30, 'blocks': {'write': True}}}} + index="books", + body={ + "settings": { + "index": { + "number_of_shards": 5, + "number_of_routing_shards": 30, + "blocks": {"write": True}, + } + } + }, ) print("'books' index created!") time.sleep(2) # add sleep to ensure the index has time to become read-only client.indices.split( - index='books', - target='bigger_books', - body={'settings': {'index': {'number_of_shards': 10 }}} + index="books", + target="bigger_books", + body={"settings": {"index": {"number_of_shards": 10}}}, ) print("'books' index split into 'bigger_books'!") - client.indices.put_settings(index='books', body={'index': {'blocks': {'write': False}}}) + client.indices.put_settings( + index="books", body={"index": {"blocks": {"write": False}}} + ) print("Write operations enabled for 'books' index!") - + # Cleanup - client.indices.delete(index=['movies', 'books', 'movies_clone', 'bigger_books']) + client.indices.delete(index=["movies", "books", "movies_clone", "bigger_books"]) print("All indices deleted!") - - if __name__ == "__main__": - test_opensearch_examples() \ No newline at end of file + test_opensearch_examples() diff --git a/samples/aws/search-requests.py b/samples/aws/search-requests.py index 885c4693..0af366f0 100644 --- a/samples/aws/search-requests.py +++ b/samples/aws/search-requests.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + # -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # @@ -9,62 +11,59 @@ # GitHub history for details. import logging - from os import environ from time import sleep from urllib.parse import urlparse from boto3 import Session -from opensearchpy import RequestsAWSV4SignerAuth, OpenSearch, RequestsHttpConnection + +from opensearchpy import OpenSearch, RequestsAWSV4SignerAuth, RequestsHttpConnection # verbose logging -logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO) +logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com -url = urlparse(environ['ENDPOINT']) -region = environ.get('AWS_REGION', 'us-east-1') -service = environ.get('SERVICE', 'es') +url = urlparse(environ["ENDPOINT"]) +region = environ.get("AWS_REGION", "us-east-1") +service = environ.get("SERVICE", "es") credentials = Session().get_credentials() auth = RequestsAWSV4SignerAuth(credentials, region, service) client = OpenSearch( - hosts=[{ - 'host': url.netloc, - 'port': url.port or 443 - }], - http_auth=auth, - use_ssl=True, - verify_certs=True, - connection_class=RequestsHttpConnection, - timeout=30 + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + timeout=30, ) # TODO: remove when OpenSearch Serverless adds support for / -if service == 'es': - info = client.info() - print(f"{info['version']['distribution']}: {info['version']['number']}") +if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") # create an index -index = 'movies' +index = "movies" client.indices.create(index=index) try: - # index data - document = {'director': 'Bennett Miller', 'title': 'Moneyball', 'year': 2011} - client.index(index=index, body=document, id='1') + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") - # wait for the document to index - sleep(1) + # wait for the document 
to index + sleep(1) - # search for the document - results = client.search(body={'query': {'match': {'director': 'miller'}}}) - for hit in results['hits']['hits']: - print(hit['_source']) + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) - # delete the document - client.delete(index=index, id='1') + # delete the document + client.delete(index=index, id="1") finally: - # delete the index - client.indices.delete(index=index) \ No newline at end of file + # delete the index + client.indices.delete(index=index) diff --git a/samples/aws/search-urllib3.py b/samples/aws/search-urllib3.py index 8fccfe9f..534caf40 100644 --- a/samples/aws/search-urllib3.py +++ b/samples/aws/search-urllib3.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + # -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # @@ -9,62 +11,59 @@ # GitHub history for details. import logging - from os import environ from time import sleep from urllib.parse import urlparse from boto3 import Session -from opensearchpy import Urllib3AWSV4SignerAuth, OpenSearch, Urllib3HttpConnection + +from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection # verbose logging -logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO) +logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com -url = urlparse(environ['ENDPOINT']) -region = environ.get('AWS_REGION', 'us-east-1') -service = environ.get('SERVICE', 'es') +url = urlparse(environ["ENDPOINT"]) +region = environ.get("AWS_REGION", "us-east-1") +service = environ.get("SERVICE", "es") credentials = Session().get_credentials() auth = Urllib3AWSV4SignerAuth(credentials, region, service) client = OpenSearch( - hosts=[{ - 'host': url.netloc, - 'port': url.port or 443 - }], - http_auth=auth, - use_ssl=True, - verify_certs=True, - connection_class=Urllib3HttpConnection, - timeout=30 + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=Urllib3HttpConnection, + timeout=30, ) # TODO: remove when OpenSearch Serverless adds support for / -if service == 'es': - info = client.info() - print(f"{info['version']['distribution']}: {info['version']['number']}") +if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") # create an index -index = 'movies' +index = "movies" client.indices.create(index=index) try: - # index data - document = {'director': 'Bennett Miller', 'title': 'Moneyball', 'year': 2011} - client.index(index=index, body=document, id='1') + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") - # wait for the document to index - sleep(1) + # wait for the document to index + sleep(1) - # search for the document - results = client.search(body={'query': {'match': {'director': 'miller'}}}) - for hit in results['hits']['hits']: - print(hit['_source']) + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) - # delete the document - client.delete(index=index, id='1') + # delete the document + client.delete(index=index, id="1") finally: - # delete the index - client.indices.delete(index=index) \ No newline at end of file + # 
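# Both AWS samples above construct the signed client the same way; only the
# signer/connection pair differs (Requests* vs Urllib3*). A condensed sketch of
# the requests-based setup, reading ENDPOINT, AWS_REGION and SERVICE from the
# environment exactly as the samples do:

from os import environ
from urllib.parse import urlparse

from boto3 import Session

from opensearchpy import OpenSearch, RequestsAWSV4SignerAuth, RequestsHttpConnection

url = urlparse(environ["ENDPOINT"])  # e.g. my-test-domain.us-east-1.es.amazonaws.com
credentials = Session().get_credentials()
auth = RequestsAWSV4SignerAuth(
    credentials, environ.get("AWS_REGION", "us-east-1"), environ.get("SERVICE", "es")
)

client = OpenSearch(
    hosts=[{"host": url.netloc, "port": url.port or 443}],
    http_auth=auth,
    use_ssl=True,
    verify_certs=True,
    connection_class=RequestsHttpConnection,
    timeout=30,
)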
delete the index + client.indices.delete(index=index) diff --git a/samples/bulk/bulk-array.py b/samples/bulk/bulk-array.py index dea7fae1..1859d541 100755 --- a/samples/bulk/bulk-array.py +++ b/samples/bulk/bulk-array.py @@ -6,55 +6,53 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import os -import json from opensearchpy import OpenSearch # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "my-index" if not client.indices.exists(index_name): - - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "mappings":{ + "mappings": { "properties": { - "value": { - "type": "float" - }, + "value": {"type": "float"}, } } - } + }, ) # index data data = [] for i in range(100): - data.append({ "index": {"_index": index_name, "_id": i }}) - data.append({ "value": i }) + data.append({"index": {"_index": index_name, "_id": i}}) + data.append({"value": i}) rc = client.bulk(data) if rc["errors"]: - print(f"There were errors:") + print("There were errors:") for item in rc["items"]: print(f"{item['index']['status']}: {item['index']['error']['type']}") else: @@ -62,4 +60,3 @@ # delete index client.indices.delete(index=index_name) - diff --git a/samples/bulk/bulk-helpers.py b/samples/bulk/bulk-helpers.py index b6aff98f..3dc165c8 100755 --- a/samples/bulk/bulk-helpers.py +++ b/samples/bulk/bulk-helpers.py @@ -6,54 +6,51 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
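# bulk-array above feeds client.bulk() a hand-built list that alternates
# action metadata and document bodies; bulk-helpers, whose diff continues just
# below, reaches the same result through the helpers module. A minimal sketch
# of the helpers-based flow (the localhost endpoint and index name are
# placeholders, not taken from the patch):

from opensearchpy import OpenSearch, helpers

client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# one flat dict per document; _index/_id are metadata, the rest is the source
actions = [{"_index": "my-index", "_id": i, "value": float(i)} for i in range(100)]

# helpers.bulk() chunks, serializes and sends the actions, returning a tuple
# whose first element is the number of successfully indexed documents
success, _ = helpers.bulk(client, actions)
print(f"Bulk-inserted {success} items.")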
+ import os -import json from opensearchpy import OpenSearch, helpers # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "my-index" if not client.indices.exists(index_name): - - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "mappings":{ + "mappings": { "properties": { - "value": { - "type": "float" - }, + "value": {"type": "float"}, } } - } + }, ) # index data data = [] for i in range(100): - data.append({ "_index": index_name, "_id": i, "value": i }) + data.append({"_index": index_name, "_id": i, "value": i}) rc = helpers.bulk(client, data) print(f"Bulk-inserted {rc[0]} items.") # delete index client.indices.delete(index=index_name) - diff --git a/samples/bulk/bulk-ld.py b/samples/bulk/bulk-ld.py index 299e16d3..fff0ae98 100755 --- a/samples/bulk/bulk-ld.py +++ b/samples/bulk/bulk-ld.py @@ -6,55 +6,54 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + -import os import json +import os from opensearchpy import OpenSearch # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "my-index" if not client.indices.exists(index_name): - - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "mappings":{ + "mappings": { "properties": { - "value": { - "type": "float" - }, + "value": {"type": "float"}, } } - } + }, ) # index data -data = '' +data = "" for i in range(100): - data += json.dumps({ "index": {"_index": index_name, "_id": i }}) + "\n" - data += json.dumps({ "value": i }) + "\n" + data += json.dumps({"index": {"_index": index_name, "_id": i}}) + "\n" + data += json.dumps({"value": i}) + "\n" rc = client.bulk(data) if rc["errors"]: - print(f"There were errors:") + print("There were errors:") for item in rc["items"]: print(f"{item['index']['status']}: {item['index']['error']['type']}") else: @@ -62,4 +61,3 @@ # delete index client.indices.delete(index=index_name) - diff --git a/samples/hello/hello-async.py b/samples/hello/hello-async.py index cebe703d..9975f575 100755 --- a/samples/hello/hello-async.py +++ b/samples/hello/hello-async.py @@ -6,103 +6,91 @@ # The OpenSearch Contributors require contributions made to # 
this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import asyncio from opensearchpy import AsyncOpenSearch + async def main(): # connect to OpenSearch - host = 'localhost' + host = "localhost" port = 9200 - auth = ('admin', 'admin') # For testing only. Don't store credentials in code. + auth = ("admin", "admin") # For testing only. Don't store credentials in code. client = AsyncOpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) try: - info = await client.info() - print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + info = await client.info() + print( + f"Welcome to {info['version']['distribution']} {info['version']['number']}!" + ) + + # create an index + + index_name = "test-index" + + index_body = {"settings": {"index": {"number_of_shards": 4}}} + + if not await client.indices.exists(index=index_name): + await client.indices.create(index_name, body=index_body) + + # add some documents to the index, asynchronously + await asyncio.gather( + *[ + client.index( + index=index_name, + body={ + "title": f"Moneyball {i}", + "director": "Bennett Miller", + "year": "2011", + }, + id=i, + ) + for i in range(10) + ] + ) - # create an index + # refresh the index + await client.indices.refresh(index=index_name) - index_name = 'test-index' + # search for a document + q = "miller" - index_body = { - 'settings': { - 'index': { - 'number_of_shards': 4 - } + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } - } - if not await client.indices.exists(index=index_name): - await client.indices.create( - index_name, - body=index_body + results = await client.search(body=query, index=index_name) + + for hit in results["hits"]["hits"]: + print(hit) + + # delete the documents + await asyncio.gather( + *[client.delete(index=index_name, id=i) for i in range(10)] ) - # add some documents to the index, asynchronously - await asyncio.gather(*[ - client.index( - index = index_name, - body = { - 'title': f"Moneyball {i}", - 'director': 'Bennett Miller', - 'year': '2011' - }, - id = i - ) for i in range(10) - ]) - - # refresh the index - await client.indices.refresh(index=index_name) - - # search for a document - q = 'miller' - - query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } - } - } - - results = await client.search( - body = query, - index = index_name - ) - - for hit in results["hits"]["hits"]: - print(hit) - - # delete the documents - await asyncio.gather(*[ - client.delete( - index = index_name, - id = i - ) for i in range(10) - ]) - - # delete the index - await client.indices.delete( - index = index_name - ) - - finally: - await client.close() + # delete the index + await client.indices.delete(index=index_name) + + finally: + await client.close() + if __name__ == "__main__": loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(main()) loop.close() - diff --git a/samples/hello/hello.py b/samples/hello/hello.py index 20363d14..0b589c9d 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -6,21 +6,25 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the 
Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + from opensearchpy import OpenSearch # connect to OpenSearch -host = 'localhost' +host = "localhost" port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ("admin", "admin") # For testing only. Don't store credentials in code. client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) info = client.info() @@ -28,76 +32,45 @@ # create an index -index_name = 'test-index' +index_name = "test-index" -index_body = { - 'settings': { - 'index': { - 'number_of_shards': 4 - } - } -} +index_body = {"settings": {"index": {"number_of_shards": 4}}} -response = client.indices.create( - index_name, - body=index_body -) +response = client.indices.create(index_name, body=index_body) print(response) # add a document to the index -document = { - 'title': 'Moneyball', - 'director': 'Bennett Miller', - 'year': '2011' -} +document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} -id = '1' +id = "1" -response = client.index( - index = index_name, - body = document, - id = id, - refresh = True -) +response = client.index(index=index_name, body=document, id=id, refresh=True) print(response) # search for a document -q = 'miller' +q = "miller" query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } - } + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } -response = client.search( - body = query, - index = index_name -) +response = client.search(body=query, index=index_name) print(response) # delete the document -response = client.delete( - index = index_name, - id = id -) +response = client.delete(index=index_name, id=id) print(response) # delete the index -response = client.indices.delete( - index = index_name -) +response = client.indices.delete(index=index_name) print(response) diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py index dab504be..4fe580ac 100644 --- a/samples/index_template/index_template_sample.py +++ b/samples/index_template/index_template_sample.py @@ -1,143 +1,129 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. from opensearchpy import OpenSearch # Create a client instance client = OpenSearch( - hosts=['https://localhost:9200'], - use_ssl=True, - verify_certs=False, - http_auth=('admin', 'admin') + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), ) # You can create an index template to define default settings and mappings for indices of certain patterns. 
The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: client.indices.put_index_template( -name='books', -body={ - 'index_patterns': ['books-*'], - 'priority': 1, - 'template': { - 'settings': { - 'index': { - 'number_of_shards': 3, - 'number_of_replicas': 0 - } + name="books", + body={ + "index_patterns": ["books-*"], + "priority": 1, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}, + "mappings": { + "properties": { + "title": {"type": "text"}, + "author": {"type": "text"}, + "published_on": {"type": "date"}, + "pages": {"type": "integer"}, + } + }, + }, }, - 'mappings': { - 'properties': { - 'title': { 'type': 'text' }, - 'author': { 'type': 'text' }, - 'published_on': { 'type': 'date' }, - 'pages': { 'type': 'integer' } - } - } - } -} ) # Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: -client.indices.create(index='books-nonfiction') -print(client.indices.get(index='books-nonfiction')) +client.indices.create(index="books-nonfiction") +print(client.indices.get(index="books-nonfiction")) # If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: client.indices.put_index_template( -name='books', -body={ - 'index_patterns': ['books-*'], - 'priority': 1, - 'template': { - 'settings': { - 'index': { - 'number_of_shards': 3, - 'number_of_replicas': 0 - } - } - } -} + name="books", + body={ + "index_patterns": ["books-*"], + "priority": 1, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + }, + }, ) client.indices.put_index_template( -name='books-fiction', -body={ - 'index_patterns': ['books-fiction-*'], - 'priority': 2, - 'template': { - 'settings': { - 'index': { - 'number_of_shards': 1, - 'number_of_replicas': 1 - } - } - } -} + name="books-fiction", + body={ + "index_patterns": ["books-fiction-*"], + "priority": 2, + "template": { + "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + }, + }, ) # # Test multiple index templates -client.indices.create(index='books-fiction-romance') -print(client.indices.get(index='books-fiction-romance')) +client.indices.create(index="books-fiction-romance") +print(client.indices.get(index="books-fiction-romance")) # Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. 
The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns: client.cluster.put_component_template( -name='books_mappings', -body={ - 'template': { - 'mappings': { - 'properties': { - 'title': { 'type': 'text' }, - 'author': { 'type': 'text' }, - 'published_on': { 'type': 'date' }, - 'pages': { 'type': 'integer' } - } - } - } -} + name="books_mappings", + body={ + "template": { + "mappings": { + "properties": { + "title": {"type": "text"}, + "author": {"type": "text"}, + "published_on": {"type": "date"}, + "pages": {"type": "integer"}, + } + } + } + }, ) client.indices.put_index_template( -name='books', -body={ - 'index_patterns': ['books-*'], - 'composed_of': ['books_mappings'], - 'priority': 4, - 'template': { - 'settings': { - 'index': { - 'number_of_shards': 3, - 'number_of_replicas': 0 - } - } - } -} + name="books", + body={ + "index_patterns": ["books-*"], + "composed_of": ["books_mappings"], + "priority": 4, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + }, + }, ) client.indices.put_index_template( -name='books-fiction', -body={ - 'index_patterns': ['books-fiction-*'], - 'composed_of': ['books_mappings'], - 'priority': 5, - 'template': { - 'settings': { - 'index': { - 'number_of_shards': 1, - 'number_of_replicas': 1 - } - } - } -} + name="books-fiction", + body={ + "index_patterns": ["books-fiction-*"], + "composed_of": ["books_mappings"], + "priority": 5, + "template": { + "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + }, + }, ) # Test composable index templates -client.indices.create(index='books-fiction-horror') -print(client.indices.get(index='books-fiction-horror')) +client.indices.create(index="books-fiction-horror") +print(client.indices.get(index="books-fiction-horror")) # Get an index template -print(client.indices.get_index_template(name='books')) +print(client.indices.get_index_template(name="books")) # Delete an index template -client.indices.delete_index_template(name='books') +client.indices.delete_index_template(name="books") # Cleanup -client.indices.delete(index='books-*') -client.indices.delete_index_template(name='books-fiction') -client.cluster.delete_component_template(name='books_mappings') \ No newline at end of file +client.indices.delete(index="books-*") +client.indices.delete_index_template(name="books-fiction") +client.cluster.delete_component_template(name="books_mappings") diff --git a/samples/json/hello-async.py b/samples/json/hello-async.py index 4ce93dbe..b9105d35 100755 --- a/samples/json/hello-async.py +++ b/samples/json/hello-async.py @@ -6,86 +6,91 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import asyncio from opensearchpy import AsyncOpenSearch + async def main(): # connect to OpenSearch - host = 'localhost' + host = "localhost" port = 9200 - auth = ('admin', 'admin') # For testing only. Don't store credentials in code. + auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
client = AsyncOpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) try: - info = await client.transport.perform_request('GET', '/') - print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + info = await client.transport.perform_request("GET", "/") + print( + f"Welcome to {info['version']['distribution']} {info['version']['number']}!" + ) - # create an index + # create an index - index_name = 'movies' + index_name = "movies" - index_body = { - 'settings': { - 'index': { - 'number_of_shards': 4 - } - } - } + index_body = {"settings": {"index": {"number_of_shards": 4}}} - print(await client.transport.perform_request("PUT", f"/{index_name}", body=index_body)) + print( + await client.transport.perform_request( + "PUT", f"/{index_name}", body=index_body + ) + ) - # add a document to the index + # add a document to the index - document = { - 'title': 'Moneyball', - 'director': 'Bennett Miller', - 'year': '2011' - } + document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} - id = '1' + id = "1" - print(await client.transport.perform_request("PUT", f"/{index_name}/_doc/{id}?refresh=true", body = document)) + print( + await client.transport.perform_request( + "PUT", f"/{index_name}/_doc/{id}?refresh=true", body=document + ) + ) - # search for a document + # search for a document - q = 'miller' + q = "miller" - query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } - } - print(await client.transport.perform_request("POST", f"/{index_name}/_search", body = query)) + print( + await client.transport.perform_request( + "POST", f"/{index_name}/_search", body=query + ) + ) - # delete the document + # delete the document - print(await client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}")) + print( + await client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}") + ) - # delete the index + # delete the index - print(await client.transport.perform_request("DELETE", f"/{index_name}")) + print(await client.transport.perform_request("DELETE", f"/{index_name}")) + finally: + await client.close() - finally: - await client.close() if __name__ == "__main__": loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(main()) loop.close() - diff --git a/samples/json/hello.py b/samples/json/hello.py index f0c81640..5df36f5f 100755 --- a/samples/json/hello.py +++ b/samples/json/hello.py @@ -6,67 +6,60 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + from opensearchpy import OpenSearch # connect to OpenSearch -host = 'localhost' +host = "localhost" port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
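+# Note: `verify_certs=False` and `ssl_show_warn=False` below suit a local
+# development cluster with its default self-signed certificate; enable
+# certificate verification when connecting to a production cluster.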
client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) -info = client.transport.perform_request('GET', '/') +info = client.transport.perform_request("GET", "/") print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") # create an index -index_name = 'movies' +index_name = "movies" -index_body = { - 'settings': { - 'index': { - 'number_of_shards': 4 - } - } -} +index_body = {"settings": {"index": {"number_of_shards": 4}}} print(client.transport.perform_request("PUT", f"/{index_name}", body=index_body)) # add a document to the index -document = { - 'title': 'Moneyball', - 'director': 'Bennett Miller', - 'year': '2011' -} +document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} -id = '1' +id = "1" -print(client.transport.perform_request("PUT", f"/{index_name}/_doc/{id}?refresh=true", body = document)) +print( + client.transport.perform_request( + "PUT", f"/{index_name}/_doc/{id}?refresh=true", body=document + ) +) # search for a document -q = 'miller' +q = "miller" query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } - } + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } -print(client.transport.perform_request("POST", f"/{index_name}/_search", body = query)) +print(client.transport.perform_request("POST", f"/{index_name}/_search", body=query)) # delete the document diff --git a/samples/knn/knn-async-basics.py b/samples/knn/knn-async-basics.py index f92acfe7..a7bb9d2f 100755 --- a/samples/knn/knn-async-basics.py +++ b/samples/knn/knn-async-basics.py @@ -6,29 +6,31 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
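+# What this sample does (a summary of the code below): connect with
+# AsyncOpenSearch, create the target index with a 5-dimensional `knn_vector`
+# field if it does not already exist, bulk-index ten random vectors with
+# `helpers.async_bulk`, then run a kNN query (k=3) for a random query vector.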
+ import asyncio import os import random -from opensearchpy import AsyncOpenSearch, AsyncHttpConnection, helpers +from opensearchpy import AsyncHttpConnection, AsyncOpenSearch, helpers + async def main(): # connect to an instance of OpenSearch - host = os.getenv('HOST', default='localhost') - port = int(os.getenv('PORT', 9200)) - auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') - ) + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = AsyncOpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, connection_class=AsyncHttpConnection, - ssl_show_warn = False + ssl_show_warn=False, ) # check whether an index exists @@ -36,34 +38,32 @@ async def main(): dimensions = 5 if not await client.indices.exists(index_name): - await client.indices.create(index_name, + await client.indices.create( + index_name, body={ - "settings":{ - "index.knn": True - }, - "mappings":{ + "settings": {"index.knn": True}, + "mappings": { "properties": { - "values": { - "type": "knn_vector", - "dimension": dimensions - }, + "values": {"type": "knn_vector", "dimension": dimensions}, } - } - } + }, + }, ) # index data vectors = [] for i in range(10): vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append({ - "_index": index_name, - "_id": i, - "values": vec, - }) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + } + ) # bulk index await helpers.async_bulk(client, vectors) @@ -72,8 +72,8 @@ async def main(): # search vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) print(f"Searching for {vec} ...") search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} @@ -86,9 +86,9 @@ async def main(): await client.close() + if __name__ == "__main__": loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(main()) loop.close() - diff --git a/samples/knn/knn-basics.py b/samples/knn/knn-basics.py index 942118a8..96efb028 100755 --- a/samples/knn/knn-basics.py +++ b/samples/knn/knn-basics.py @@ -6,6 +6,10 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
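+# What this sample does: the synchronous counterpart of knn-async-basics.py.
+# Create a 5-dimensional `knn_vector` index if needed, bulk-index ten random
+# vectors with `helpers.bulk`, search for the three nearest neighbors of a
+# random query vector, then delete the index.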
+ import os import random @@ -14,19 +18,16 @@ # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists @@ -34,34 +35,32 @@ dimensions = 5 if not client.indices.exists(index_name): - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "settings":{ - "index.knn": True - }, - "mappings":{ + "settings": {"index.knn": True}, + "mappings": { "properties": { - "values": { - "type": "knn_vector", - "dimension": dimensions - }, + "values": {"type": "knn_vector", "dimension": dimensions}, } - } - } + }, + }, ) # index data vectors = [] for i in range(10): vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append({ - "_index": index_name, - "_id": i, - "values": vec, - }) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + } + ) # bulk index helpers.bulk(client, vectors) @@ -70,8 +69,8 @@ # search vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) +for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) print(f"Searching for {vec} ...") search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} @@ -81,4 +80,3 @@ # delete index client.indices.delete(index=index_name) - diff --git a/samples/knn/knn-boolean-filter.py b/samples/knn/knn-boolean-filter.py index 6a69b5f8..5ae7704c 100755 --- a/samples/knn/knn-boolean-filter.py +++ b/samples/knn/knn-boolean-filter.py @@ -6,6 +6,10 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
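+# What this sample does: bulk-index 3,000 random vectors, each tagged with a
+# random genre under `metadata.genre`, then run a kNN query (k=5) wrapped in
+# a `bool` query whose `filter` restricts matches to a single genre.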
+ import os import random @@ -14,19 +18,16 @@ # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists @@ -34,38 +35,34 @@ dimensions = 5 if not client.indices.exists(index_name): - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "settings":{ - "index.knn": True - }, - "mappings":{ + "settings": {"index.knn": True}, + "mappings": { "properties": { - "values": { - "type": "knn_vector", - "dimension": dimensions - }, + "values": {"type": "knn_vector", "dimension": dimensions}, } - } - } + }, + }, ) # index data vectors = [] -genres = ['fiction', 'drama', 'romance'] +genres = ["fiction", "drama", "romance"] for i in range(3000): vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append({ - "_index": index_name, - "_id": i, - "values": vec, - "metadata": { - "genre": random.choice(genres) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + "metadata": {"genre": random.choice(genres)}, } - }) + ) # bulk index helpers.bulk(client, vectors) @@ -75,30 +72,15 @@ # search genre = random.choice(genres) vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) +for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) print(f"Searching for {vec} with the '{genre}' genre ...") search_query = { "query": { "bool": { - "filter": { - "bool": { - "must": [{ - "term": { - "metadata.genre": genre - } - }] - } - }, - "must": { - "knn": { - "values": { - "vector": vec, - "k": 5 - } - } - } + "filter": {"bool": {"must": [{"term": {"metadata.genre": genre}}]}}, + "must": {"knn": {"values": {"vector": vec, "k": 5}}}, } } } diff --git a/samples/knn/knn-efficient-filter.py b/samples/knn/knn-efficient-filter.py index 569e4685..cbfd41ad 100755 --- a/samples/knn/knn-efficient-filter.py +++ b/samples/knn/knn-efficient-filter.py @@ -6,75 +6,145 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
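+# What this sample does: build `hotels-index`, whose 2-dimensional `location`
+# field uses a Lucene HNSW `knn_vector`, index twelve hotels with `parking`
+# and `rating` fields, then run a kNN query (k=3) whose embedded `filter`
+# keeps only hotels with a rating of 8-10 and parking, applying the filter
+# during the vector search rather than afterwards.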
+ import os -import random from opensearchpy import OpenSearch, helpers # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "hotels-index" if not client.indices.exists(index_name): - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "settings":{ + "settings": { "index.knn": True, "knn.algo_param.ef_search": 100, "number_of_shards": 1, - "number_of_replicas": 0 + "number_of_replicas": 0, }, - "mappings":{ + "mappings": { "properties": { "location": { - "type": "knn_vector", + "type": "knn_vector", "dimension": 2, "method": { "name": "hnsw", "space_type": "l2", "engine": "lucene", - "parameters": { - "ef_construction": 100, - "m": 16 - } - } + "parameters": {"ef_construction": 100, "m": 16}, + }, }, } - } - } + }, + }, ) # index data vectors = [ - { "_index": "hotels-index", "_id": "1", "location": [5.2, 4.4], "parking" : "true", "rating" : 5 }, - { "_index": "hotels-index", "_id": "2", "location": [5.2, 3.9], "parking" : "false", "rating" : 4 }, - { "_index": "hotels-index", "_id": "3", "location": [4.9, 3.4], "parking" : "true", "rating" : 9 }, - { "_index": "hotels-index", "_id": "4", "location": [4.2, 4.6], "parking" : "false", "rating" : 6}, - { "_index": "hotels-index", "_id": "5", "location": [3.3, 4.5], "parking" : "true", "rating" : 8 }, - { "_index": "hotels-index", "_id": "6", "location": [6.4, 3.4], "parking" : "true", "rating" : 9 }, - { "_index": "hotels-index", "_id": "7", "location": [4.2, 6.2], "parking" : "true", "rating" : 5 }, - { "_index": "hotels-index", "_id": "8", "location": [2.4, 4.0], "parking" : "true", "rating" : 8 }, - { "_index": "hotels-index", "_id": "9", "location": [1.4, 3.2], "parking" : "false", "rating" : 5 }, - { "_index": "hotels-index", "_id": "10", "location": [7.0, 9.9], "parking" : "true", "rating" : 9 }, - { "_index": "hotels-index", "_id": "11", "location": [3.0, 2.3], "parking" : "false", "rating" : 6 }, - { "_index": "hotels-index", "_id": "12", "location": [5.0, 1.0], "parking" : "true", "rating" : 3 }, + { + "_index": "hotels-index", + "_id": "1", + "location": [5.2, 4.4], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "2", + "location": [5.2, 3.9], + "parking": "false", + "rating": 4, + }, + { + "_index": "hotels-index", + "_id": "3", + "location": [4.9, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "4", + "location": [4.2, 4.6], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "5", + "location": [3.3, 4.5], + "parking": "true", + "rating": 8, + }, + { + "_index": "hotels-index", + "_id": "6", + "location": [6.4, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "7", + "location": [4.2, 6.2], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "8", + "location": [2.4, 4.0], + "parking": "true", + "rating": 8, + }, + { + 
"_index": "hotels-index", + "_id": "9", + "location": [1.4, 3.2], + "parking": "false", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "10", + "location": [7.0, 9.9], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "11", + "location": [3.0, 2.3], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "12", + "location": [5.0, 1.0], + "parking": "true", + "rating": 3, + }, ] helpers.bulk(client, vectors) @@ -87,30 +157,19 @@ "query": { "knn": { "location": { - "vector": [5, 4], - "k": 3, - "filter": { - "bool": { - "must": [ - { - "range": { - "rating": { - "gte": 8, - "lte": 10 - } - } - }, - { - "term": { - "parking": "true" - } - } + "vector": [5, 4], + "k": 3, + "filter": { + "bool": { + "must": [ + {"range": {"rating": {"gte": 8, "lte": 10}}}, + {"term": {"parking": "true"}}, ] } - } + }, } } - } + }, } results = client.search(index=index_name, body=search_query) diff --git a/samples/security/roles.py b/samples/security/roles.py index 0f0f2f61..8a2d1ef5 100644 --- a/samples/security/roles.py +++ b/samples/security/roles.py @@ -6,6 +6,9 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. # A basic OpenSearch sample that create and manage roles. @@ -14,16 +17,16 @@ # connect to OpenSearch -host = 'localhost' +host = "localhost" port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ("admin", "admin") # For testing only. Don't store credentials in code. client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # Create a Role @@ -31,16 +34,16 @@ role_name = "test-role" role_content = { - "cluster_permissions": ["cluster_monitor"], - "index_permissions": [ - { - "index_patterns": ["index", "test-*"], - "allowed_actions": [ - "data_access", - "indices_monitor", - ], - } - ], + "cluster_permissions": ["cluster_monitor"], + "index_permissions": [ + { + "index_patterns": ["index", "test-*"], + "allowed_actions": [ + "data_access", + "indices_monitor", + ], + } + ], } response = client.security.create_role(role_name, body=role_content) diff --git a/samples/security/users.py b/samples/security/users.py index d33bd058..0a778b8d 100644 --- a/samples/security/users.py +++ b/samples/security/users.py @@ -6,6 +6,9 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. # A basic OpenSearch sample that create and manage users. @@ -14,16 +17,16 @@ # connect to OpenSearch -host = 'localhost' +host = "localhost" port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # Create a User diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index 7c8eb9ca..55f1e586 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -1,10 +1,14 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. diff --git a/utils/generate-api.py b/utils/generate-api.py index 049038a4..7e241236 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -1,10 +1,14 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. diff --git a/utils/license-headers.py b/utils/license-headers.py index 255097d8..67b0ef4a 100644 --- a/utils/license-headers.py +++ b/utils/license-headers.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -6,24 +7,6 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - """Script which verifies that all source files have a license header. Has two modes: 'fix' and 'check'. 'fix' fixes problems, 'check' will @@ -33,20 +16,20 @@ import os import re import sys -from itertools import chain from typing import Iterator, List -lines_to_keep = ["# -*- coding: utf-8 -*-\n", "#!/usr/bin/env python\n"] -license_header_lines = [ - "# SPDX-License-Identifier: Apache-2.0\n", - "#\n", - "# The OpenSearch Contributors require contributions made to\n", - "# this file be licensed under the Apache-2.0 license or a\n", - "# compatible open source license.\n", - "#\n", - "# Modifications Copyright OpenSearch Contributors. 
See\n", - "# GitHub history for details.\n", -] +lines_to_keep = ["# -*- coding: utf-8 -*-", "#!/usr/bin/env python"] + +license_header = """ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +""".strip() def find_files_to_fix(sources: List[str]) -> Iterator[str]: @@ -67,20 +50,18 @@ def find_files_to_fix(sources: List[str]) -> Iterator[str]: def does_file_need_fix(filepath: str) -> bool: if not re.search(r"\.pyi?$", filepath): return False + existing_header = "" with open(filepath, mode="r") as f: - first_license_line = None for line in f: - if line == license_header_lines[0]: - first_license_line = line + line = line.strip() + if len(line) == 0 or line in lines_to_keep: + pass + elif line[0] == "#": + existing_header += line + existing_header += "\n" + else: break - elif line not in lines_to_keep: - return True - for header_line, line in zip( - license_header_lines, chain((first_license_line,), f) - ): - if line != header_line: - return True - return False + return not existing_header.startswith(license_header) def add_header_to_file(filepath: str) -> None: @@ -88,9 +69,9 @@ def add_header_to_file(filepath: str) -> None: lines = list(f) i = 0 for i, line in enumerate(lines): - if line not in lines_to_keep: + if len(line) > 0 and line not in lines_to_keep: break - lines = lines[:i] + license_header_lines + lines[i:] + lines = lines[:i] + [license_header] + lines[i:] with open(filepath, mode="w") as f: f.truncate() f.write("".join(lines)) From 9dbf6896e15bb8f10f44cf554aca2947d943f7be Mon Sep 17 00:00:00 2001 From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com> Date: Mon, 30 Oct 2023 20:02:30 -0700 Subject: [PATCH 40/80] Applied document lifecycle guide & sample. 
(#559)

Applied pull request number to CHANGELOG.md

Signed-off-by: Djcarrillo6
Signed-off-by: roma2023
---
 CHANGELOG.md                     |   1 +
 USER_GUIDE.md                    |   1 +
 guides/document_lifecycle.md     | 182 ++++++++++++++++++
 .../document_lifecycle_sample.py |  94 +++++++++
 4 files changed, 278 insertions(+)
 create mode 100644 guides/document_lifecycle.md
 create mode 100644 samples/document_lifecycle/document_lifecycle_sample.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 225d5c5a..f2d503a5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 - Added `nox -rs generate` ([#554](https://github.com/opensearch-project/opensearch-py/pull/554))
 - Added a utf-8 header to all .py files ([#557](https://github.com/opensearch-project/opensearch-py/pull/557))
 - Added `samples`, `benchmarks` and `docs` to `nox -rs format` ([#556](https://github.com/opensearch-project/opensearch-py/pull/556))
+- Added guide on the document lifecycle API(s) ([#559](https://github.com/opensearch-project/opensearch-py/pull/559))
 ### Changed
 - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508))
 - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513))
diff --git a/USER_GUIDE.md b/USER_GUIDE.md
index 90387b43..753485e5 100644
--- a/USER_GUIDE.md
+++ b/USER_GUIDE.md
@@ -158,6 +158,7 @@ print(response)
 - [Advanced Index Actions](guides/advanced_index_actions.md)
 - [Making Raw JSON REST Requests](guides/json.md)
 - [Connection Classes](guides/connection_classes.md)
+- [Document Lifecycle](guides/document_lifecycle.md)
 
 ## Plugins
 
diff --git a/guides/document_lifecycle.md b/guides/document_lifecycle.md
new file mode 100644
index 00000000..fcad9e8c
--- /dev/null
+++ b/guides/document_lifecycle.md
@@ -0,0 +1,182 @@
+# Document Lifecycle Guide
+- [Document Lifecycle](#document-lifecycle)
+  - [Setup](#setup)
+  - [Document API Actions](#document-api-actions)
+    - [Create a new document with specified ID](#create-a-new-document-with-specified-id)
+    - [Create a new document with auto-generated ID](#create-a-new-document-with-auto-generated-id)
+    - [Get a document](#get-a-document)
+    - [Get multiple documents](#get-multiple-documents)
+    - [Check if a document exists](#check-if-a-document-exists)
+    - [Update a document](#update-a-document)
+    - [Update multiple documents by query](#update-multiple-documents-by-query)
+    - [Delete a document](#delete-a-document)
+    - [Delete multiple documents by query](#delete-multiple-documents-by-query)
+  - [Cleanup](#cleanup)
+
+
+# Document Lifecycle
+This guide covers the OpenSearch Python client API actions for the document lifecycle. You'll learn how to create, read, update, and delete documents in your OpenSearch cluster. Whether you're new to OpenSearch or an experienced user, this guide provides the information you need to manage your document lifecycle effectively.
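+
+As a quick orientation before the details, the snippet below strings together the core calls covered in this guide. It is a condensed sketch, assuming the client and `movies` index created in the Setup section that follows:
+
+```python
+client.index(index='movies', id=1, body={'title': 'Beauty and the Beast', 'year': 1991})  # create
+print(client.get(index='movies', id=1)['_source'])                                        # read
+client.update(index='movies', id=1, body={'doc': {'year': 1995}})                         # update
+client.delete(index='movies', id=1)                                                       # delete
+```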
+
+## Setup
+Assuming you have OpenSearch running locally on port 9200, you can create a client instance with the following code:
+
+```python
+from opensearchpy import OpenSearch
+client = OpenSearch(
+    hosts=['https://localhost:9200'],
+    use_ssl=True,
+    verify_certs=False,
+    http_auth=('admin', 'admin')
+)
+```
+
+Next, create an index named `movies` with the default settings:
+
+```python
+index = 'movies'
+if not client.indices.exists(index=index):
+    client.indices.create(index=index)
+```
+
+## Document API Actions
+
+### Create a new document with specified ID
+To create a new document, use the `create` or `index` API action. The following code creates two new documents with IDs of `1` and `2`:
+
+```python
+client.create(index=index, id=1, body={'title': 'Beauty and the Beast', 'year': 1991})
+client.create(index=index, id=2, body={'title': 'Beauty and the Beast - Live Action', 'year': 2017})
+```
+
+Note that the `create` action is NOT idempotent. If you try to create a document with an ID that already exists, the request will fail:
+
+```python
+try:
+    client.create(index=index, id=1, body={'title': 'Just Another Movie'})
+except Exception as e:
+    print(e)
+```
+
+The `index` action, on the other hand, is idempotent. If you try to index a document with an existing ID, the request will succeed and overwrite the existing document. Note that no new document will be created in this case. You can think of the `index` action as an upsert:
+
+```python
+client.index(index=index, id=2, body={'title': 'Updated Title'})
+client.index(index=index, id=2, body={'title': 'The Lion King', 'year': 1994})
+```
+
+### Create a new document with auto-generated ID
+You can also create a new document with an auto-generated ID by omitting the `id` parameter. The following code creates a document with an auto-generated ID in the `movies` index:
+
+```python
+client.index(index=index, body={"title": "The Lion King 2", "year": 1998})
+```
+
+In this case, the ID of the created document can be found in the `_id` field of the response body:
+
+```python
+{
+    "_index": "movies",
+    "_type": "_doc",
+    "_id": "1",
+    "_version": 1,
+    "result": "created",
+    "_shards": {
+        "total": 2,
+        "successful": 1,
+        "failed": 0
+    },
+    "_seq_no": 0,
+    "_primary_term": 1
+}
+```
+
+### Get a document
+To get a document, use the `get` API action. The following code gets the document with ID `1` from the `movies` index:
+
+```python
+client.get(index=index, id=1)['_source']
+# OUTPUT: {"title": "Beauty and the Beast", "year": 1991}
+```
+
+You can also use `_source_includes` and `_source_excludes` parameters to specify which fields to include or exclude in the response:
+
+```python
+client.get(index=index, id=1, _source_includes=['title'])['_source']
+# OUTPUT: {"title": "Beauty and the Beast"}
+
+client.get(index=index, id=1, _source_excludes=['title'])['_source']
+# OUTPUT: {"year": 1991}
+```
+
+### Get multiple documents
+To get multiple documents, use the `mget` API action:
+
+```python
+client.mget(index=index, body={ 'docs': [{ '_id': 1 }, { '_id': 2 }] })['docs']
+```
+
+### Check if a document exists
+To check if a document exists, use the `exists` API action. The following code checks if the document with ID `1` exists in the `movies` index:
+
+```python
+client.exists(index=index, id=1)
+```
+
+### Update a document
+To update a document, use the `update` API action. The following code updates the `year` field of the document with ID `1` in the `movies` index:
+
+```python
+client.update(index=index, id=1, body={'doc': {'year': 1995}})
+```
+
+Alternatively, you can use the `script` parameter to update a document with a script. The following code increments the `year` field of the document with ID `1` by 5 using a Painless script, the default scripting language in OpenSearch:
+
+```python
+client.update(index=index, id=1, body={ 'script': { 'source': 'ctx._source.year += 5' } })
+```
+
+Note that while both the `update` and `index` actions perform updates, they are not the same. The `update` action is a partial update, while the `index` action is a full update. The `update` action only updates the fields that are specified in the request body, while the `index` action overwrites the entire document with the new document.
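+
+To make the difference concrete, here is a small sketch that reuses document `2` from the examples above; the printed outputs follow from the partial-versus-full semantics just described:
+
+```python
+# partial update: only `year` changes; `title` is preserved
+client.update(index=index, id=2, body={'doc': {'year': 2019}})
+print(client.get(index=index, id=2)['_source'])
+# OUTPUT: {'title': 'The Lion King', 'year': 2019}
+
+# full update: the new body replaces the whole document, so `year` disappears
+client.index(index=index, id=2, body={'title': 'The Lion King'})
+print(client.get(index=index, id=2)['_source'])
+# OUTPUT: {'title': 'The Lion King'}
+```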
+
+### Update multiple documents by query
+
+To update documents that match a query, use the `update_by_query` API action. The following code decreases the `year` field of all documents with a `year` greater than 2023 by 1:
+
+```python
+client.update_by_query(index=index, body={
+    'script': { 'source': 'ctx._source.year -= 1' },
+    'query': { 'range': { 'year': { 'gt': 2023 } } }
+})
+```
+
+### Delete a document
+To delete a document, use the `delete` API action. The following code deletes the document with ID `1`:
+
+```python
+client.delete(index=index, id=1)
+```
+
+By default, the `delete` action is not idempotent. If you try to delete a document that does not exist, or delete the same document twice, you will run into a Not Found (404) error. You can make the `delete` action idempotent by setting the `ignore` parameter to `404`:
+
+```python
+client.delete(index=index, id=1, ignore=404)
+```
+
+### Delete multiple documents by query
+To delete documents that match a query, use the `delete_by_query` API action. The following code deletes all documents with a `year` greater than 2023:
+
+```python
+client.delete_by_query(index=index, body={
+    'query': { 'range': { 'year': { 'gt': 2023 } } }
+})
+```
+
+## Cleanup
+To clean up the resources created in this guide, delete the `movies` index:
+
+```python
+client.indices.delete(index=index)
+```
+
+# Sample Code
+See [document_lifecycle_sample.py](/samples/document_lifecycle/document_lifecycle_sample.py) for a working sample of the concepts in this guide.
\ No newline at end of file
diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py
new file mode 100644
index 00000000..1d338da7
--- /dev/null
+++ b/samples/document_lifecycle/document_lifecycle_sample.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
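+
+# This sample walks through the same lifecycle as guides/document_lifecycle.md:
+# create the `movies` index, index documents with fixed and auto-generated IDs,
+# read them back with `get`/`mget`/`exists`, apply partial and scripted updates
+# (including `update_by_query`), delete by ID and by query, then drop the index.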
+ +from opensearchpy import OpenSearch + +# For cleaner output, comment in the two lines below to disable warnings and informational messages +# import urllib3 +# urllib3.disable_warnings() + + +# Connect to OpenSearch +client = OpenSearch( + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), +) + +# Create an index +index = "movies" +if not client.indices.exists(index=index): + client.indices.create(index=index) + +# Create documents +client.index(index=index, id=1, body={"title": "Beauty and the Beast", "year": 1991}) +client.index( + index=index, + id=2, + body={"title": "Beauty and the Beast - Live Action", "year": 2017}, +) + +# Index a document +client.index(index=index, id=2, body={"title": "The Lion King", "year": 1994}) + +# Create a document with auto-generated ID +result = client.index(index=index, body={"title": "The Lion King 2", "year": 1998}) +print(result) + +# Get a document +result = client.get(index=index, id=1)["_source"] +print(result) + +# Get a document with _source includes +result = client.get(index=index, id=1, _source_includes=["title"])["_source"] +print(result) + +# Get a document with _source excludes +result = client.get(index=index, id=1, _source_excludes=["title"])["_source"] +print(result) + +# Get multiple documents +result = client.mget(index=index, body={"docs": [{"_id": 1}, {"_id": 2}]})["docs"] +print(result) + +# Check if a document exists +result = client.exists(index=index, id=1) +print(result) + +# Update a document +client.update(index=index, id=1, body={"doc": {"year": 1995}}) + +# Update a document using script +client.update(index=index, id=1, body={"script": {"source": "ctx._source.year += 5"}}) + +# Update multiple documents by query +client.update_by_query( + index=index, + body={ + "script": {"source": "ctx._source.year -= 1"}, + "query": {"range": {"year": {"gt": 2023}}}, + }, +) + +# Delete a document +client.delete(index=index, id=1) + +# Delete a document with ignore 404 +client.delete(index=index, id=1, ignore=404) + +# Delete multiple documents by query +client.delete_by_query(index=index, body={"query": {"range": {"year": {"gt": 2023}}}}) + +# Delete the index +client.indices.delete(index=index) +print("Deleted index!") From 7b36528ee77434112d66a36cebb1be7b4e563336 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Mon, 6 Nov 2023 13:08:19 -0500 Subject: [PATCH 41/80] Merge `.pyi` type stubs inline (#563) * Merged types into .py code. Signed-off-by: dblock * Fix: nox -rs generate. Signed-off-by: dblock * Updated CHANGELOG. Signed-off-by: dblock * Use lowest common python version for lint. Signed-off-by: dblock * Fix: don't typeshed. Signed-off-by: dblock * Removed unneeded comment. Signed-off-by: dblock * Simplify OPENSEARCH_URL. Signed-off-by: dblock * Fix: positional ignore_status used as chunk_size. Signed-off-by: dblock * Fix: parse version string. Signed-off-by: dblock * Remove future annotations for Python 3.6. Signed-off-by: dblock * Fix: types in documentation. Signed-off-by: dblock * Improve CHANGELOG text. Signed-off-by: dblock * Re-added missing separator. Signed-off-by: dblock * Remove duplicate licenses. Signed-off-by: dblock * Get rid of Optional[Any]. Signed-off-by: dblock * Fix docs with AsyncOpenSearch. Signed-off-by: dblock * Fix: undo comment. 
Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + DEVELOPER_GUIDE.md | 4 +- MANIFEST.in | 2 +- benchmarks/bench_async.py | 16 +- benchmarks/bench_info_sync.py | 10 +- benchmarks/bench_sync.py | 10 +- noxfile.py | 23 +- opensearchpy/__init__.py | 16 +- opensearchpy/__init__.pyi | 132 -- opensearchpy/_async/client/__init__.py | 348 ++++- opensearchpy/_async/client/__init__.pyi | 1139 ----------------- opensearchpy/_async/client/_patch.py | 23 +- opensearchpy/_async/client/_patch.pyi | 71 - opensearchpy/_async/client/cat.py | 465 ++++--- opensearchpy/_async/client/cat.pyi | 601 --------- opensearchpy/_async/client/client.py | 42 + opensearchpy/_async/client/cluster.py | 147 ++- opensearchpy/_async/client/cluster.pyi | 456 ------- .../_async/client/dangling_indices.py | 22 +- .../_async/client/dangling_indices.pyi | 99 -- opensearchpy/_async/client/features.py | 6 +- opensearchpy/_async/client/features.pyi | 66 - opensearchpy/_async/client/indices.py | 365 +++++- opensearchpy/_async/client/indices.pyi | 1097 ---------------- opensearchpy/_async/client/ingest.py | 38 +- opensearchpy/_async/client/ingest.pyi | 136 -- opensearchpy/_async/client/nodes.py | 42 +- opensearchpy/_async/client/nodes.pyi | 149 --- opensearchpy/_async/client/plugins.py | 9 +- opensearchpy/_async/client/plugins.pyi | 19 - opensearchpy/_async/client/remote.py | 4 +- opensearchpy/_async/client/remote.pyi | 46 - opensearchpy/_async/client/remote_store.py | 10 +- opensearchpy/_async/client/remote_store.pyi | 42 - opensearchpy/_async/client/security.py | 332 ++++- opensearchpy/_async/client/security.pyi | 821 ------------ opensearchpy/_async/client/snapshot.py | 90 +- opensearchpy/_async/client/snapshot.pyi | 272 ---- opensearchpy/_async/client/tasks.py | 21 +- opensearchpy/_async/client/tasks.pyi | 104 -- opensearchpy/_async/client/utils.py | 10 + opensearchpy/_async/client/utils.pyi | 41 - opensearchpy/_async/compat.py | 2 +- opensearchpy/_async/compat.pyi | 30 - opensearchpy/_async/helpers/actions.py | 137 +- opensearchpy/_async/helpers/actions.pyi | 115 -- opensearchpy/_async/helpers/document.py | 132 +- opensearchpy/_async/helpers/document.pyi | 15 - opensearchpy/_async/helpers/faceted_search.py | 42 +- .../_async/helpers/faceted_search.pyi | 11 - opensearchpy/_async/helpers/index.py | 125 +- opensearchpy/_async/helpers/index.pyi | 12 - opensearchpy/_async/helpers/mapping.py | 50 +- opensearchpy/_async/helpers/mapping.pyi | 11 - opensearchpy/_async/helpers/search.py | 71 +- opensearchpy/_async/helpers/search.pyi | 14 - opensearchpy/_async/helpers/test.py | 10 +- opensearchpy/_async/helpers/test.pyi | 20 - .../_async/helpers/update_by_query.py | 24 +- .../_async/helpers/update_by_query.pyi | 13 - opensearchpy/_async/http_aiohttp.py | 94 +- opensearchpy/_async/http_aiohttp.pyi | 73 -- opensearchpy/_async/plugins/__init__.pyi | 9 - opensearchpy/_async/plugins/alerting.py | 86 +- opensearchpy/_async/plugins/alerting.pyi | 83 -- .../_async/plugins/index_management.py | 34 +- .../_async/plugins/index_management.pyi | 72 -- opensearchpy/_async/transport.py | 84 +- opensearchpy/_async/transport.pyi | 91 -- opensearchpy/_version.py | 2 +- opensearchpy/client/__init__.py | 348 ++++- opensearchpy/client/__init__.pyi | 1139 ----------------- opensearchpy/client/_patch.py | 21 +- opensearchpy/client/_patch.pyi | 71 - opensearchpy/client/cat.py | 465 ++++--- opensearchpy/client/cat.pyi | 601 --------- opensearchpy/client/client.py | 42 + opensearchpy/client/cluster.py | 147 ++- 
opensearchpy/client/cluster.pyi | 456 ------- opensearchpy/client/dangling_indices.py | 22 +- opensearchpy/client/dangling_indices.pyi | 99 -- opensearchpy/client/features.py | 6 +- opensearchpy/client/features.pyi | 66 - opensearchpy/client/indices.py | 365 +++++- opensearchpy/client/indices.pyi | 1097 ---------------- opensearchpy/client/ingest.py | 38 +- opensearchpy/client/ingest.pyi | 136 -- opensearchpy/client/nodes.py | 42 +- opensearchpy/client/nodes.pyi | 149 --- opensearchpy/client/plugins.py | 9 +- opensearchpy/client/plugins.pyi | 19 - opensearchpy/client/remote.py | 4 +- opensearchpy/client/remote.pyi | 46 - opensearchpy/client/remote_store.py | 10 +- opensearchpy/client/remote_store.pyi | 42 - opensearchpy/client/security.py | 332 ++++- opensearchpy/client/security.pyi | 821 ------------ opensearchpy/client/snapshot.py | 90 +- opensearchpy/client/snapshot.pyi | 272 ---- opensearchpy/client/tasks.py | 21 +- opensearchpy/client/tasks.pyi | 104 -- opensearchpy/client/utils.py | 29 +- opensearchpy/client/utils.pyi | 68 - opensearchpy/compat.py | 14 +- opensearchpy/compat.pyi | 54 - opensearchpy/connection/__init__.pyi | 32 - opensearchpy/connection/async_connections.py | 18 +- opensearchpy/connection/async_connections.pyi | 11 - opensearchpy/connection/base.py | 136 +- opensearchpy/connection/base.pyi | 119 -- opensearchpy/connection/connections.py | 18 +- opensearchpy/connection/connections.pyi | 30 - opensearchpy/connection/http_async.py | 66 +- opensearchpy/connection/http_async.pyi | 38 - opensearchpy/connection/http_requests.py | 65 +- opensearchpy/connection/http_requests.pyi | 51 - opensearchpy/connection/http_urllib3.py | 69 +- opensearchpy/connection/http_urllib3.pyi | 65 - opensearchpy/connection/pooling.py | 14 +- opensearchpy/connection/pooling.pyi | 34 - opensearchpy/connection_pool.py | 73 +- opensearchpy/connection_pool.pyi | 86 -- opensearchpy/exceptions.py | 27 +- opensearchpy/exceptions.pyi | 64 - opensearchpy/helpers/__init__.pyi | 46 - opensearchpy/helpers/actions.py | 172 +-- opensearchpy/helpers/actions.pyi | 137 -- opensearchpy/helpers/aggs.py | 65 +- opensearchpy/helpers/aggs.pyi | 105 -- opensearchpy/helpers/analysis.py | 75 +- opensearchpy/helpers/analysis.pyi | 50 - opensearchpy/helpers/asyncsigner.py | 16 +- opensearchpy/helpers/asyncsigner.pyi | 19 - opensearchpy/helpers/document.py | 126 +- opensearchpy/helpers/document.pyi | 37 - opensearchpy/helpers/errors.py | 12 +- opensearchpy/helpers/errors.pyi | 38 - opensearchpy/helpers/faceted_search.py | 99 +- opensearchpy/helpers/faceted_search.pyi | 37 - opensearchpy/helpers/field.py | 218 ++-- opensearchpy/helpers/field.pyi | 71 - opensearchpy/helpers/function.py | 18 +- opensearchpy/helpers/function.pyi | 41 - opensearchpy/helpers/index.py | 128 +- opensearchpy/helpers/index.pyi | 29 - opensearchpy/helpers/mapping.py | 65 +- opensearchpy/helpers/mapping.pyi | 31 - opensearchpy/helpers/query.py | 47 +- opensearchpy/helpers/query.pyi | 96 -- opensearchpy/helpers/response/__init__.py | 41 +- opensearchpy/helpers/response/__init__.pyi | 32 - opensearchpy/helpers/response/aggs.py | 21 +- opensearchpy/helpers/response/aggs.pyi | 35 - opensearchpy/helpers/response/hit.py | 15 +- opensearchpy/helpers/response/hit.pyi | 30 - opensearchpy/helpers/search.py | 153 ++- opensearchpy/helpers/search.pyi | 36 - opensearchpy/helpers/signer.py | 11 +- opensearchpy/helpers/test.py | 21 +- opensearchpy/helpers/test.pyi | 44 - opensearchpy/helpers/update_by_query.py | 24 +- opensearchpy/helpers/update_by_query.pyi | 30 - 
opensearchpy/helpers/utils.py | 160 +-- opensearchpy/helpers/utils.pyi | 33 - opensearchpy/helpers/wrappers.py | 16 +- opensearchpy/helpers/wrappers.pyi | 30 - opensearchpy/plugins/__init__.pyi | 9 - opensearchpy/plugins/alerting.py | 50 +- opensearchpy/plugins/alerting.pyi | 73 -- opensearchpy/plugins/index_management.py | 30 +- opensearchpy/plugins/index_management.pyi | 72 -- opensearchpy/serializer.py | 36 +- opensearchpy/serializer.pyi | 56 - opensearchpy/transport.py | 111 +- opensearchpy/transport.pyi | 95 -- .../advanced_index_actions_sample.py | 2 +- setup.py | 9 +- test_opensearchpy/TestHttpServer.py | 6 +- test_opensearchpy/run_tests.py | 4 +- .../test_async/test_connection.py | 77 +- .../test_async/test_helpers/conftest.py | 3 +- .../test_async/test_helpers/test_document.py | 69 +- .../test_helpers/test_faceted_search.py | 11 +- .../test_async/test_helpers/test_index.py | 23 +- .../test_async/test_helpers/test_mapping.py | 10 +- .../test_async/test_helpers/test_search.py | 45 +- .../test_helpers/test_update_by_query.py | 11 +- .../test_async/test_http_connection.py | 13 +- .../test_async/test_plugins_client.py | 2 +- .../test_async/test_server/__init__.py | 4 +- .../test_async/test_server/conftest.py | 3 +- .../test_async/test_server/test_clients.py | 11 +- .../test_server/test_helpers/conftest.py | 2 +- .../test_server/test_helpers/test_actions.py | 42 +- .../test_server/test_helpers/test_data.py | 6 +- .../test_server/test_helpers/test_document.py | 72 +- .../test_helpers/test_faceted_search.py | 21 +- .../test_server/test_helpers/test_index.py | 15 +- .../test_server/test_helpers/test_mapping.py | 9 +- .../test_server/test_helpers/test_search.py | 19 +- .../test_helpers/test_update_by_query.py | 9 +- .../test_server/test_plugins/test_alerting.py | 11 +- .../test_plugins/test_index_management.py | 11 +- .../test_server/test_rest_api_spec.py | 19 +- .../test_security_plugin.py | 35 +- test_opensearchpy/test_async/test_signer.py | 15 +- .../test_async/test_transport.py | 62 +- test_opensearchpy/test_cases.py | 14 +- test_opensearchpy/test_client/__init__.py | 39 +- test_opensearchpy/test_client/test_cluster.py | 6 +- test_opensearchpy/test_client/test_indices.py | 8 +- .../test_client/test_overrides.py | 30 +- .../test_client/test_plugins/test_alerting.py | 24 +- .../test_plugins/test_index_management.py | 18 +- .../test_plugins/test_plugins_client.py | 2 +- .../test_client/test_point_in_time.py | 16 +- .../test_client/test_remote_store.py | 2 +- .../test_client/test_requests.py | 4 +- test_opensearchpy/test_client/test_urllib3.py | 6 +- test_opensearchpy/test_client/test_utils.py | 32 +- .../test_connection/test_base_connection.py | 28 +- .../test_requests_http_connection.py | 82 +- .../test_urllib3_http_connection.py | 58 +- test_opensearchpy/test_connection_pool.py | 26 +- test_opensearchpy/test_exceptions.py | 4 +- .../test_helpers/test_actions.py | 18 +- test_opensearchpy/test_helpers/test_aggs.py | 36 +- .../test_helpers/test_analysis.py | 10 +- .../test_helpers/test_document.py | 66 +- .../test_helpers/test_faceted_search.py | 8 +- test_opensearchpy/test_helpers/test_field.py | 32 +- test_opensearchpy/test_helpers/test_index.py | 22 +- .../test_helpers/test_mapping.py | 10 +- test_opensearchpy/test_helpers/test_query.py | 106 +- test_opensearchpy/test_helpers/test_result.py | 34 +- test_opensearchpy/test_helpers/test_search.py | 50 +- .../test_helpers/test_update_by_query.py | 10 +- test_opensearchpy/test_helpers/test_utils.py | 20 +- .../test_helpers/test_validation.py 
| 22 +- .../test_helpers/test_wrappers.py | 8 +- test_opensearchpy/test_serializer.py | 50 +- test_opensearchpy/test_server/__init__.py | 2 +- test_opensearchpy/test_server/test_clients.py | 6 +- .../test_server/test_helpers/conftest.py | 5 +- .../test_server/test_helpers/test_actions.py | 43 +- .../test_server/test_helpers/test_analysis.py | 6 +- .../test_server/test_helpers/test_count.py | 6 +- .../test_server/test_helpers/test_data.py | 6 +- .../test_server/test_helpers/test_document.py | 68 +- .../test_helpers/test_faceted_search.py | 18 +- .../test_server/test_helpers/test_index.py | 12 +- .../test_server/test_helpers/test_mapping.py | 8 +- .../test_server/test_helpers/test_search.py | 18 +- .../test_helpers/test_update_by_query.py | 6 +- .../test_server/test_plugins/test_alerting.py | 8 +- .../test_plugins/test_index_management.py | 8 +- .../test_server/test_rest_api_spec.py | 46 +- .../test_server_secured/test_clients.py | 2 +- .../test_security_plugin.py | 32 +- test_opensearchpy/test_transport.py | 50 +- test_opensearchpy/utils.py | 24 +- utils/build-dists.py | 16 +- utils/generate-api.py | 61 +- utils/license-headers.py | 4 +- utils/templates/base | 4 +- utils/templates/base_pyi | 2 - utils/templates/func_params | 11 +- utils/templates/func_params_pyi | 26 - 268 files changed, 6001 insertions(+), 16161 deletions(-) delete mode 100644 opensearchpy/__init__.pyi delete mode 100644 opensearchpy/_async/client/__init__.pyi delete mode 100644 opensearchpy/_async/client/_patch.pyi delete mode 100644 opensearchpy/_async/client/cat.pyi create mode 100644 opensearchpy/_async/client/client.py delete mode 100644 opensearchpy/_async/client/cluster.pyi delete mode 100644 opensearchpy/_async/client/dangling_indices.pyi delete mode 100644 opensearchpy/_async/client/features.pyi delete mode 100644 opensearchpy/_async/client/indices.pyi delete mode 100644 opensearchpy/_async/client/ingest.pyi delete mode 100644 opensearchpy/_async/client/nodes.pyi delete mode 100644 opensearchpy/_async/client/plugins.pyi delete mode 100644 opensearchpy/_async/client/remote.pyi delete mode 100644 opensearchpy/_async/client/remote_store.pyi delete mode 100644 opensearchpy/_async/client/security.pyi delete mode 100644 opensearchpy/_async/client/snapshot.pyi delete mode 100644 opensearchpy/_async/client/tasks.pyi delete mode 100644 opensearchpy/_async/client/utils.pyi delete mode 100644 opensearchpy/_async/compat.pyi delete mode 100644 opensearchpy/_async/helpers/actions.pyi delete mode 100644 opensearchpy/_async/helpers/document.pyi delete mode 100644 opensearchpy/_async/helpers/faceted_search.pyi delete mode 100644 opensearchpy/_async/helpers/index.pyi delete mode 100644 opensearchpy/_async/helpers/mapping.pyi delete mode 100644 opensearchpy/_async/helpers/search.pyi delete mode 100644 opensearchpy/_async/helpers/test.pyi delete mode 100644 opensearchpy/_async/helpers/update_by_query.pyi delete mode 100644 opensearchpy/_async/http_aiohttp.pyi delete mode 100644 opensearchpy/_async/plugins/__init__.pyi delete mode 100644 opensearchpy/_async/plugins/alerting.pyi delete mode 100644 opensearchpy/_async/plugins/index_management.pyi delete mode 100644 opensearchpy/_async/transport.pyi delete mode 100644 opensearchpy/client/__init__.pyi delete mode 100644 opensearchpy/client/_patch.pyi delete mode 100644 opensearchpy/client/cat.pyi create mode 100644 opensearchpy/client/client.py delete mode 100644 opensearchpy/client/cluster.pyi delete mode 100644 opensearchpy/client/dangling_indices.pyi delete mode 100644 
opensearchpy/client/features.pyi delete mode 100644 opensearchpy/client/indices.pyi delete mode 100644 opensearchpy/client/ingest.pyi delete mode 100644 opensearchpy/client/nodes.pyi delete mode 100644 opensearchpy/client/plugins.pyi delete mode 100644 opensearchpy/client/remote.pyi delete mode 100644 opensearchpy/client/remote_store.pyi delete mode 100644 opensearchpy/client/security.pyi delete mode 100644 opensearchpy/client/snapshot.pyi delete mode 100644 opensearchpy/client/tasks.pyi delete mode 100644 opensearchpy/client/utils.pyi delete mode 100644 opensearchpy/compat.pyi delete mode 100644 opensearchpy/connection/__init__.pyi delete mode 100644 opensearchpy/connection/async_connections.pyi delete mode 100644 opensearchpy/connection/base.pyi delete mode 100644 opensearchpy/connection/connections.pyi delete mode 100644 opensearchpy/connection/http_async.pyi delete mode 100644 opensearchpy/connection/http_requests.pyi delete mode 100644 opensearchpy/connection/http_urllib3.pyi delete mode 100644 opensearchpy/connection/pooling.pyi delete mode 100644 opensearchpy/connection_pool.pyi delete mode 100644 opensearchpy/exceptions.pyi delete mode 100644 opensearchpy/helpers/__init__.pyi delete mode 100644 opensearchpy/helpers/actions.pyi delete mode 100644 opensearchpy/helpers/aggs.pyi delete mode 100644 opensearchpy/helpers/analysis.pyi delete mode 100644 opensearchpy/helpers/asyncsigner.pyi delete mode 100644 opensearchpy/helpers/document.pyi delete mode 100644 opensearchpy/helpers/errors.pyi delete mode 100644 opensearchpy/helpers/faceted_search.pyi delete mode 100644 opensearchpy/helpers/field.pyi delete mode 100644 opensearchpy/helpers/function.pyi delete mode 100644 opensearchpy/helpers/index.pyi delete mode 100644 opensearchpy/helpers/mapping.pyi delete mode 100644 opensearchpy/helpers/query.pyi delete mode 100644 opensearchpy/helpers/response/__init__.pyi delete mode 100644 opensearchpy/helpers/response/aggs.pyi delete mode 100644 opensearchpy/helpers/response/hit.pyi delete mode 100644 opensearchpy/helpers/search.pyi delete mode 100644 opensearchpy/helpers/test.pyi delete mode 100644 opensearchpy/helpers/update_by_query.pyi delete mode 100644 opensearchpy/helpers/utils.pyi delete mode 100644 opensearchpy/helpers/wrappers.pyi delete mode 100644 opensearchpy/plugins/__init__.pyi delete mode 100644 opensearchpy/plugins/alerting.pyi delete mode 100644 opensearchpy/plugins/index_management.pyi delete mode 100644 opensearchpy/serializer.pyi delete mode 100644 opensearchpy/transport.pyi delete mode 100644 utils/templates/base_pyi delete mode 100644 utils/templates/func_params_pyi diff --git a/CHANGELOG.md b/CHANGELOG.md index f2d503a5..b4bc0a02 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) - Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) - Use API generator for all APIs ([#551](https://github.com/opensearch-project/opensearch-py/pull/551)) +- Merge `.pyi` type stubs inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/DEVELOPER_GUIDE.md 
b/DEVELOPER_GUIDE.md index dd1fad26..f6cb568c 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -66,9 +66,11 @@ If you want to auto-start one, the following will start a new instance and run t If your OpenSearch docker instance is running, you can execute the test suite directly. ``` -$ nox -rs test +$ nox -rs test-3.9 ``` +Substitute `3.9` with your Python version above, or use `nox -rs test` to run against multiple versions. + To run tests against different versions of OpenSearch, use `run-tests [with/without security] [version]`: ``` diff --git a/MANIFEST.in b/MANIFEST.in index 40d49135..9f446e08 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -5,7 +5,7 @@ include LICENSE include MANIFEST.in include README.md include setup.py -recursive-include opensearch* py.typed *.pyi +recursive-include opensearch* py.typed prune test_opensearch recursive-exclude * __pycache__ diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index c7eb5714..a27a126c 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -22,7 +22,7 @@ item_count = 100 -async def index_records(client, item_count): +async def index_records(client, item_count) -> None: await asyncio.gather( *[ client.index( @@ -71,34 +71,34 @@ async def test_async(client_count=1, item_count=1): await asyncio.gather(*[client.close() for client in clients]) -def test(item_count=1, client_count=1): +def test(item_count: int = 1, client_count: int = 1) -> None: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(test_async(item_count, client_count)) loop.close() -def test_1(): +def test_1() -> None: test(1, 32 * item_count) -def test_2(): +def test_2() -> None: test(2, 16 * item_count) -def test_4(): +def test_4() -> None: test(4, 8 * item_count) -def test_8(): +def test_8() -> None: test(8, 4 * item_count) -def test_16(): +def test_16() -> None: test(16, 2 * item_count) -def test_32(): +def test_32() -> None: test(32, item_count) diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index 229a2e4d..29b289cd 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -75,23 +75,23 @@ def test(thread_count=1, request_count=1, client_count=1): print(f"latency={latency}") -def test_1(): +def test_1() -> None: test(1, 32 * request_count, 1) -def test_2(): +def test_2() -> None: test(2, 16 * request_count, 2) -def test_4(): +def test_4() -> None: test(4, 8 * request_count, 3) -def test_8(): +def test_8() -> None: test(8, 4 * request_count, 8) -def test_32(): +def test_32() -> None: test(32, request_count, 32) diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index e201eaba..83225ef9 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -112,23 +112,23 @@ def test(thread_count=1, item_count=1, client_count=1): print(f"{count}, latency={latency}") -def test_1(): +def test_1() -> None: test(1, 32 * item_count, 1) -def test_2(): +def test_2() -> None: test(2, 16 * item_count, 2) -def test_4(): +def test_4() -> None: test(4, 8 * item_count, 3) -def test_8(): +def test_8() -> None: test(8, 4 * item_count, 8) -def test_32(): +def test_32() -> None: test(32, item_count, 32) diff --git a/noxfile.py b/noxfile.py index a9cd9068..3b97ad03 100644 --- a/noxfile.py +++ b/noxfile.py @@ -41,7 +41,7 @@ @nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) -def test(session): +def test(session) -> None: session.install(".") session.install("-r", "dev-requirements.txt") @@ -49,7 +49,7 @@ @nox.session() -def 
format(session): +def format(session) -> None: session.install("black", "isort") session.run("isort", "--profile=black", *SOURCE_FILES) @@ -59,9 +59,18 @@ def format(session): lint(session) -@nox.session() -def lint(session): - session.install("flake8", "black", "mypy", "isort", "types-requests", "types-six") +@nox.session(python=["3.7"]) +def lint(session) -> None: + session.install( + "flake8", + "black", + "mypy", + "isort", + "types-requests", + "types-six", + "types-simplejson", + "types-python-dateutil", + ) session.run("isort", "--check", "--profile=black", *SOURCE_FILES) session.run("black", "--target-version=py33", "--check", *SOURCE_FILES) @@ -85,7 +94,7 @@ def lint(session): @nox.session() -def docs(session): +def docs(session) -> None: session.install(".") session.install( "-rdev-requirements.txt", "sphinx-rtd-theme", "sphinx-autodoc-typehints" @@ -94,7 +103,7 @@ def docs(session): @nox.session() -def generate(session): +def generate(session) -> None: session.install("-rdev-requirements.txt") session.run("python", "utils/generate-api.py") format(session) diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index 8116d60a..3dcd7389 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -31,21 +31,25 @@ import logging import re -import sys import warnings from ._version import __versionstr__ _major, _minor, _patch = [ - int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() + int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() # type: ignore ] + VERSION = __version__ = (_major, _minor, _patch) logger = logging.getLogger("opensearch") logger.addHandler(logging.NullHandler()) +from ._async.client import AsyncOpenSearch +from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection +from ._async.transport import AsyncTransport from .client import OpenSearch from .connection import ( + AsyncHttpConnection, Connection, RequestsHttpConnection, Urllib3HttpConnection, @@ -247,14 +251,6 @@ "normalizer", "token_filter", "tokenizer", -] - -from ._async.client import AsyncOpenSearch -from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection -from ._async.transport import AsyncTransport -from .connection import AsyncHttpConnection - -__all__ += [ "AIOHttpConnection", "AsyncConnection", "AsyncTransport", diff --git a/opensearchpy/__init__.pyi b/opensearchpy/__init__.pyi deleted file mode 100644 index 96c17075..00000000 --- a/opensearchpy/__init__.pyi +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -import sys -from typing import Tuple - -from ._async.client import AsyncOpenSearch as AsyncOpenSearch -from ._async.http_aiohttp import AIOHttpConnection as AIOHttpConnection -from ._async.http_aiohttp import AsyncConnection as AsyncConnection -from ._async.transport import AsyncTransport as AsyncTransport -from .client import OpenSearch as OpenSearch -from .connection import AsyncHttpConnection as AsyncHttpConnection -from .connection import Connection as Connection -from .connection import RequestsHttpConnection as RequestsHttpConnection -from .connection import Urllib3HttpConnection as Urllib3HttpConnection -from .connection import connections as connections -from .connection_pool import ConnectionPool as ConnectionPool -from .connection_pool import ConnectionSelector as ConnectionSelector -from .connection_pool import RoundRobinSelector as RoundRobinSelector -from .exceptions import AuthenticationException as AuthenticationException -from .exceptions import AuthorizationException as AuthorizationException -from .exceptions import ConflictError as ConflictError -from .exceptions import ConnectionError as ConnectionError -from .exceptions import ConnectionTimeout as ConnectionTimeout -from .exceptions import IllegalOperation as IllegalOperation -from .exceptions import ImproperlyConfigured as ImproperlyConfigured -from .exceptions import NotFoundError as NotFoundError -from .exceptions import OpenSearchDeprecationWarning as OpenSearchDeprecationWarning -from .exceptions import OpenSearchDslException as OpenSearchDslException -from .exceptions import OpenSearchException as OpenSearchException -from .exceptions import OpenSearchWarning as OpenSearchWarning -from .exceptions import RequestError as RequestError -from .exceptions import SerializationError as SerializationError -from .exceptions import SSLError as SSLError -from .exceptions import TransportError as TransportError -from .exceptions import UnknownDslObject as UnknownDslObject -from .exceptions import ValidationException as ValidationException -from .helpers import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth -from .helpers import AWSV4SignerAuth as AWSV4SignerAuth -from .helpers.aggs import A as A -from .helpers.analysis import Analyzer, CharFilter, Normalizer, TokenFilter, Tokenizer -from .helpers.document import Document as Document -from .helpers.document import InnerDoc as InnerDoc -from .helpers.document import MetaField as MetaField -from .helpers.faceted_search import DateHistogramFacet as DateHistogramFacet -from .helpers.faceted_search import Facet as Facet -from .helpers.faceted_search import FacetedResponse as FacetedResponse -from .helpers.faceted_search import FacetedSearch as FacetedSearch -from .helpers.faceted_search import HistogramFacet as HistogramFacet -from .helpers.faceted_search import NestedFacet as NestedFacet -from .helpers.faceted_search import RangeFacet as RangeFacet -from .helpers.faceted_search import TermsFacet as TermsFacet -from .helpers.field import Binary as Binary -from .helpers.field import Boolean as Boolean -from .helpers.field import Byte as Byte -from .helpers.field import Completion as Completion -from .helpers.field import CustomField as CustomField -from .helpers.field import Date as Date -from .helpers.field import DateRange as DateRange -from .helpers.field import DenseVector as DenseVector -from .helpers.field import Double as Double -from .helpers.field import DoubleRange as 
DoubleRange -from .helpers.field import Field as Field -from .helpers.field import Float as Float -from .helpers.field import FloatRange as FloatRange -from .helpers.field import GeoPoint as GeoPoint -from .helpers.field import GeoShape as GeoShape -from .helpers.field import HalfFloat as HalfFloat -from .helpers.field import Integer as Integer -from .helpers.field import IntegerRange as IntegerRange -from .helpers.field import Ip as Ip -from .helpers.field import IpRange as IpRange -from .helpers.field import Join as Join -from .helpers.field import Keyword as Keyword -from .helpers.field import Long as Long -from .helpers.field import LongRange as LongRange -from .helpers.field import Murmur3 as Murmur3 -from .helpers.field import Nested as Nested -from .helpers.field import Object as Object -from .helpers.field import Percolator as Percolator -from .helpers.field import RangeField as RangeField -from .helpers.field import RankFeature as RankFeature -from .helpers.field import RankFeatures as RankFeatures -from .helpers.field import ScaledFloat as ScaledFloat -from .helpers.field import SearchAsYouType as SearchAsYouType -from .helpers.field import Short as Short -from .helpers.field import SparseVector as SparseVector -from .helpers.field import Text as Text -from .helpers.field import TokenCount as TokenCount -from .helpers.field import construct_field as construct_field -from .helpers.function import SF as SF -from .helpers.index import Index as Index -from .helpers.index import IndexTemplate as IndexTemplate -from .helpers.mapping import Mapping as Mapping -from .helpers.query import Q as Q -from .helpers.search import MultiSearch as MultiSearch -from .helpers.search import Search as Search -from .helpers.update_by_query import UpdateByQuery as UpdateByQuery -from .helpers.utils import AttrDict as AttrDict -from .helpers.utils import AttrList as AttrList -from .helpers.utils import DslBase as DslBase -from .helpers.wrappers import Range as Range -from .serializer import JSONSerializer as JSONSerializer -from .transport import Transport as Transport - -VERSION: Tuple[int, int, int] -__version__: Tuple[int, int, int] -__versionstr__: str diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index a7587f82..279fda37 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -39,9 +39,11 @@ from __future__ import unicode_literals import logging +from typing import Any, Type from ..transport import AsyncTransport, TransportError from .cat import CatClient +from .client import Client from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient @@ -54,12 +56,12 @@ from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient -from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params +from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") -class AsyncOpenSearch(object): +class AsyncOpenSearch(Client): """ OpenSearch client. Provides a straightforward mapping from Python to OpenSearch REST endpoints. 
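The new `client.py` files created above hold the shared base class that `AsyncOpenSearch` now inherits from. Their exact contents are not shown in this excerpt; the sketch below is an assumption inferred from the `super().__init__(hosts, transport_class, **kwargs)` call in the hunk that follows and from `_normalize_hosts` being dropped from the `__init__.py` import list:

```python
# Hypothetical sketch of the new shared base class in client.py. The file is
# created by this patch, but its body is not shown here; the shape below is
# inferred from the super().__init__(...) call in AsyncOpenSearch.__init__.
from typing import Any, Type

from ..transport import Transport
from .utils import _normalize_hosts


class Client(object):
    """Owns the transport; OpenSearch and AsyncOpenSearch both inherit this."""

    def __init__(
        self,
        hosts: Any = None,
        transport_class: Type[Transport] = Transport,
        **kwargs: Any
    ) -> None:
        # Host normalization moves here, which is why _normalize_hosts is
        # removed from the __init__.py import list above.
        self.transport = transport_class(_normalize_hosts(hosts), **kwargs)
```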
@@ -184,13 +186,19 @@ def default(self, obj): """ - from ._patch import ( + # include PIT functions inside _patch.py + from ._patch import ( # type: ignore create_point_in_time, delete_point_in_time, list_all_point_in_time, ) - def __init__(self, hosts=None, transport_class=AsyncTransport, **kwargs): + def __init__( + self, + hosts: Any = None, + transport_class: Type[AsyncTransport] = AsyncTransport, + **kwargs: Any + ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. Node should be a dictionary ({"host": "localhost", "port": 9200}), @@ -205,7 +213,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be :class:`~opensearchpy.Transport` class and, subsequently, to the :class:`~opensearchpy.Connection` instances. """ - self.transport = transport_class(_normalize_hosts(hosts), **kwargs) + super().__init__(hosts, transport_class, **kwargs) # namespaced clients for compatibility with API names self.cat = CatClient(self) @@ -224,10 +232,10 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.plugins = PluginsClient(self) - def __repr__(self): + def __repr__(self) -> Any: try: # get a list of all connections - cons = self.transport.hosts + cons: Any = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] @@ -236,21 +244,25 @@ def __repr__(self): # probably operating on custom transport and connection_pool, ignore return super(AsyncOpenSearch, self).__repr__() - async def __aenter__(self): + async def __aenter__(self) -> Any: if hasattr(self.transport, "_async_call"): await self.transport._async_call() return self - async def __aexit__(self, *_): + async def __aexit__(self, *_: Any) -> None: await self.close() - async def close(self): + async def close(self) -> None: """Closes the Transport and all internal connections""" await self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params() - async def ping(self, params=None, headers=None): + async def ping( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns whether the cluster is running. @@ -263,7 +275,11 @@ async def ping(self, params=None, headers=None): return False @query_params() - async def info(self, params=None, headers=None): + async def info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the cluster. @@ -281,7 +297,14 @@ async def info(self, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def create(self, index, id, body, params=None, headers=None): + async def create( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. @@ -330,7 +353,14 @@ async def create(self, index, id, body, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def index(self, index, body, id=None, params=None, headers=None): + async def index( + self, + index: Any, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a document in an index. 
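The annotated `__aenter__`/`__aexit__` pair above keeps the async client usable as a context manager; a minimal usage sketch (the host is a placeholder):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    # Placeholder host; __aexit__ awaits client.close() on the way out,
    # shutting down the transport and its connections.
    async with AsyncOpenSearch(hosts=["http://localhost:9200"]) as client:
        print(await client.ping())   # bool: is the cluster reachable?
        print(await client.info())   # basic cluster information


asyncio.run(main())
```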
@@ -387,7 +417,13 @@ async def index(self, index, body, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def bulk(self, body, index=None, params=None, headers=None): + async def bulk( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to perform multiple index/update/delete operations in a single request. @@ -431,7 +467,13 @@ async def bulk(self, body, index=None, params=None, headers=None): ) @query_params() - async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): + async def clear_scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Explicitly clears the search context for a scroll. @@ -467,7 +509,13 @@ async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=Non "routing", "terminate_after", ) - async def count(self, body=None, index=None, params=None, headers=None): + async def count( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns number of documents matching a query. @@ -523,7 +571,13 @@ async def count(self, body=None, index=None, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def delete(self, index, id, params=None, headers=None): + async def delete( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes a document from the index. @@ -592,7 +646,13 @@ async def delete(self, index, id, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - async def delete_by_query(self, index, body, params=None, headers=None): + async def delete_by_query( + self, + index: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes documents matching the provided query. @@ -685,7 +745,12 @@ async def delete_by_query(self, index, body, params=None, headers=None): ) @query_params("requests_per_second") - async def delete_by_query_rethrottle(self, task_id, params=None, headers=None): + async def delete_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Delete By Query operation. @@ -706,7 +771,12 @@ async def delete_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_script(self, id, params=None, headers=None): + async def delete_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a script. @@ -738,7 +808,13 @@ async def delete_script(self, id, params=None, headers=None): "version", "version_type", ) - async def exists(self, index, id, params=None, headers=None): + async def exists( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document exists in an index. @@ -783,7 +859,13 @@ async def exists(self, index, id, params=None, headers=None): "version", "version_type", ) - async def exists_source(self, index, id, params=None, headers=None): + async def exists_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document source exists in an index. 
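Each method in this file is wrapped in `@query_params(...)`, which lifts the listed keyword arguments into the request's query string. A simplified sketch of that idea, covering only the happy path (the real decorator lives in the `utils` module and handles more cases):

```python
# Simplified sketch of the @query_params idea -- not the actual
# implementation, which lives in opensearchpy/client/utils.py.
from functools import wraps
from typing import Any, Callable


def query_params(*accepted: str) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Move whitelisted kwargs into the `params` dict passed to the transport."""

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        @wraps(func)
        def wrapper(*args: Any, params: Any = None, headers: Any = None, **kwargs: Any) -> Any:
            params = dict(params or {})
            for name in accepted:
                if name in kwargs:
                    # e.g. refresh=True ends up as ?refresh=true on the wire
                    params[name] = kwargs.pop(name)
            return func(*args, params=params, headers=headers, **kwargs)

        return wrapper

    return decorator
```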
@@ -831,7 +913,14 @@ async def exists_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - async def explain(self, index, id, body=None, params=None, headers=None): + async def explain( + self, + index: Any, + id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about why a specific matches (or doesn't match) a query. @@ -878,7 +967,13 @@ async def explain(self, index, id, body=None, params=None, headers=None): "ignore_unavailable", "include_unmapped", ) - async def field_caps(self, body=None, index=None, params=None, headers=None): + async def field_caps( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about the capabilities of fields among multiple indices. @@ -919,7 +1014,13 @@ async def field_caps(self, body=None, index=None, params=None, headers=None): "version", "version_type", ) - async def get(self, index, id, params=None, headers=None): + async def get( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a document. @@ -954,7 +1055,12 @@ async def get(self, index, id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout") - async def get_script(self, id, params=None, headers=None): + async def get_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a script. @@ -984,7 +1090,13 @@ async def get_script(self, id, params=None, headers=None): "version", "version_type", ) - async def get_source(self, index, id, params=None, headers=None): + async def get_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the source of a document. @@ -1028,7 +1140,13 @@ async def get_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - async def mget(self, body, index=None, params=None, headers=None): + async def mget( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to get multiple documents in one request. @@ -1073,7 +1191,13 @@ async def mget(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def msearch(self, body, index=None, params=None, headers=None): + async def msearch( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search operations in one request. @@ -1125,7 +1249,13 @@ async def msearch(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def msearch_template(self, body, index=None, params=None, headers=None): + async def msearch_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search template operations in one request. @@ -1173,7 +1303,13 @@ async def msearch_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - async def mtermvectors(self, body=None, index=None, params=None, headers=None): + async def mtermvectors( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns multiple termvectors in one request. 
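As an illustration of the `mget` signature annotated above, fetching several documents in one round trip (host, index, and ids are placeholders):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def fetch_two() -> None:
    # Placeholder host/index/ids, for illustration only.
    async with AsyncOpenSearch(hosts=["http://localhost:9200"]) as client:
        resp = await client.mget(
            body={"docs": [{"_id": "1"}, {"_id": "2"}]},
            index="movies",
        )
        for doc in resp["docs"]:
            print(doc["_id"], doc.get("found"))


asyncio.run(fetch_two())
```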
@@ -1221,7 +1357,14 @@ async def mtermvectors(self, body=None, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def put_script(self, id, body, context=None, params=None, headers=None): + async def put_script( + self, + id: Any, + body: Any, + context: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a script. @@ -1251,7 +1394,13 @@ async def put_script(self, id, body, context=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type" ) - async def rank_eval(self, body, index=None, params=None, headers=None): + async def rank_eval( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to evaluate the quality of ranked search results over a set of typical search queries. @@ -1293,7 +1442,12 @@ async def rank_eval(self, body, index=None, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - async def reindex(self, body, params=None, headers=None): + async def reindex( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or @@ -1330,7 +1484,12 @@ async def reindex(self, body, params=None, headers=None): ) @query_params("requests_per_second") - async def reindex_rethrottle(self, task_id, params=None, headers=None): + async def reindex_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Reindex operation. @@ -1351,8 +1510,12 @@ async def reindex_rethrottle(self, task_id, params=None, headers=None): @query_params() async def render_search_template( - self, body=None, id=None, params=None, headers=None - ): + self, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1369,7 +1532,12 @@ async def render_search_template( ) @query_params() - async def scripts_painless_execute(self, body=None, params=None, headers=None): + async def scripts_painless_execute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows an arbitrary script to be executed and a result to be returned. @@ -1385,7 +1553,13 @@ async def scripts_painless_execute(self, body=None, params=None, headers=None): ) @query_params("rest_total_hits_as_int", "scroll") - async def scroll(self, body=None, scroll_id=None, params=None, headers=None): + async def scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to retrieve a large numbers of results from a single search request. @@ -1454,7 +1628,13 @@ async def scroll(self, body=None, scroll_id=None, params=None, headers=None): "typed_keys", "version", ) - async def search(self, body=None, index=None, params=None, headers=None): + async def search( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns results matching a query. 
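The `search` and `scroll` methods above are typically used together to page through large result sets; a sketch assuming a local cluster and a `movies` index (`scroll` and `size` are both in `search`'s query-parameter whitelist):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def scan_all(index: str) -> None:
    # Placeholder host/index; demonstrates the search -> scroll loop.
    async with AsyncOpenSearch(hosts=["http://localhost:9200"]) as client:
        resp = await client.search(
            index=index,
            body={"query": {"match_all": {}}},
            scroll="1m",
            size=100,
        )
        while resp["hits"]["hits"]:
            for hit in resp["hits"]["hits"]:
                print(hit["_id"])
            resp = await client.scroll(
                body={"scroll_id": resp["_scroll_id"], "scroll": "1m"}
            )
        await client.clear_scroll(scroll_id=resp["_scroll_id"])


asyncio.run(scan_all("movies"))
```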
@@ -1574,7 +1754,12 @@ async def search(self, body=None, index=None, params=None, headers=None): "preference", "routing", ) - async def search_shards(self, index=None, params=None, headers=None): + async def search_shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the indices and shards that a search request would be executed against. @@ -1615,7 +1800,13 @@ async def search_shards(self, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def search_template(self, body, index=None, params=None, headers=None): + async def search_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1677,7 +1868,14 @@ async def search_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - async def termvectors(self, index, body=None, id=None, params=None, headers=None): + async def termvectors( + self, + index: Any, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information and statistics about terms in the fields of a particular document. @@ -1732,7 +1930,14 @@ async def termvectors(self, index, body=None, id=None, params=None, headers=None "timeout", "wait_for_active_shards", ) - async def update(self, index, id, body, params=None, headers=None): + async def update( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a document with a script or partial document. @@ -1814,7 +2019,13 @@ async def update(self, index, id, body, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - async def update_by_query(self, index, body=None, params=None, headers=None): + async def update_by_query( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. @@ -1908,7 +2119,12 @@ async def update_by_query(self, index, body=None, params=None, headers=None): ) @query_params("requests_per_second") - async def update_by_query_rethrottle(self, task_id, params=None, headers=None): + async def update_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Update By Query operation. @@ -1929,7 +2145,11 @@ async def update_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - async def get_script_context(self, params=None, headers=None): + async def get_script_context( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all script contexts. @@ -1939,7 +2159,11 @@ async def get_script_context(self, params=None, headers=None): ) @query_params() - async def get_script_languages(self, params=None, headers=None): + async def get_script_languages( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns available script types, languages and contexts. @@ -1955,7 +2179,12 @@ async def get_script_languages(self, params=None, headers=None): "preference", "routing", ) - async def create_pit(self, index, params=None, headers=None): + async def create_pit( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates point in time context. 
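A usage sketch for the point-in-time methods (`delete_pit` is defined just below; the `keep_alive` argument is assumed from `create_pit`'s query-parameter list, which is truncated in this excerpt):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def pit_roundtrip() -> None:
    # Placeholder host/index. keep_alive is assumed to be one of
    # create_pit's (truncated) query parameters.
    async with AsyncOpenSearch(hosts=["http://localhost:9200"]) as client:
        pit = await client.create_pit(index="movies", keep_alive="1m")
        pit_id = pit["pit_id"]
        # ... run searches pinned to this point in time here ...
        await client.delete_pit(body={"pit_id": [pit_id]})


asyncio.run(pit_roundtrip())
```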
@@ -1983,7 +2212,11 @@ async def create_pit(self, index, params=None, headers=None): ) @query_params() - async def delete_all_pits(self, params=None, headers=None): + async def delete_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes all active point in time searches. @@ -1993,7 +2226,12 @@ async def delete_all_pits(self, params=None, headers=None): ) @query_params() - async def delete_pit(self, body=None, params=None, headers=None): + async def delete_pit( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes one or more point in time searches based on the IDs passed. @@ -2009,7 +2247,11 @@ async def delete_pit(self, body=None, params=None, headers=None): ) @query_params() - async def get_all_pits(self, params=None, headers=None): + async def get_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Lists all active point in time searches. diff --git a/opensearchpy/_async/client/__init__.pyi b/opensearchpy/_async/client/__init__.pyi deleted file mode 100644 index 32ea967e..00000000 --- a/opensearchpy/_async/client/__init__.pyi +++ /dev/null @@ -1,1139 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from __future__ import unicode_literals - -import logging -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -from ..transport import AsyncTransport -from .cat import CatClient -from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient -from .features import FeaturesClient -from .indices import IndicesClient -from .ingest import IngestClient -from .nodes import NodesClient -from .remote import RemoteClient -from .remote_store import RemoteStoreClient -from .security import SecurityClient -from .snapshot import SnapshotClient -from .tasks import TasksClient - -logger: logging.Logger - -class AsyncOpenSearch(object): - transport: AsyncTransport - - cat: CatClient - cluster: ClusterClient - features: FeaturesClient - indices: IndicesClient - ingest: IngestClient - nodes: NodesClient - remote: RemoteClient - security: SecurityClient - snapshot: SnapshotClient - tasks: TasksClient - remote_store: RemoteStoreClient - def __init__( - self, - hosts: Any = ..., - transport_class: Type[AsyncTransport] = ..., - **kwargs: Any, - ) -> None: ... - def __repr__(self) -> str: ... - async def __aenter__(self) -> "AsyncOpenSearch": ... - async def __aexit__(self, *_: Any) -> None: ... - async def close(self) -> None: ... - # AUTO-GENERATED-API-DEFINITIONS # - async def ping( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def create( - self, - index: Any, - id: Any, - *, - body: Any, - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def index( - self, - index: Any, - *, - body: Any, - id: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - op_type: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def bulk( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def clear_scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def count( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - min_score: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - index: Any, - id: Any, - *, - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_by_query( - self, - index: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def exists( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def exists_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def explain( - self, - index: Any, - id: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - lenient: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def field_caps( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_unmapped: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def mget( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def msearch( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def msearch_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
-    async def mtermvectors(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        index: Optional[Any] = ...,
-        field_statistics: Optional[Any] = ...,
-        fields: Optional[Any] = ...,
-        ids: Optional[Any] = ...,
-        offsets: Optional[Any] = ...,
-        payloads: Optional[Any] = ...,
-        positions: Optional[Any] = ...,
-        preference: Optional[Any] = ...,
-        realtime: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        term_statistics: Optional[Any] = ...,
-        version: Optional[Any] = ...,
-        version_type: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def put_script(
-        self,
-        id: Any,
-        *,
-        body: Any,
-        context: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def rank_eval(
-        self,
-        *,
-        body: Any,
-        index: Optional[Any] = ...,
-        allow_no_indices: Optional[Any] = ...,
-        expand_wildcards: Optional[Any] = ...,
-        ignore_unavailable: Optional[Any] = ...,
-        search_type: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def reindex(
-        self,
-        *,
-        body: Any,
-        max_docs: Optional[Any] = ...,
-        refresh: Optional[Any] = ...,
-        requests_per_second: Optional[Any] = ...,
-        scroll: Optional[Any] = ...,
-        slices: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        wait_for_active_shards: Optional[Any] = ...,
-        wait_for_completion: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def reindex_rethrottle(
-        self,
-        task_id: Any,
-        *,
-        requests_per_second: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def render_search_template(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        id: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def scripts_painless_execute(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def scroll(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        scroll_id: Optional[Any] = ...,
-        rest_total_hits_as_int: Optional[Any] = ...,
-        scroll: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def search(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        index: Optional[Any] = ...,
-        _source: Optional[Any] = ...,
-        _source_excludes: Optional[Any] = ...,
-        _source_includes: Optional[Any] = ...,
-        allow_no_indices: Optional[Any] = ...,
-        allow_partial_search_results: Optional[Any] = ...,
-        analyze_wildcard: Optional[Any] = ...,
-        analyzer: Optional[Any] = ...,
-        batched_reduce_size: Optional[Any] = ...,
-        ccs_minimize_roundtrips: Optional[Any] = ...,
-        default_operator: Optional[Any] = ...,
-        df: Optional[Any] = ...,
-        docvalue_fields: Optional[Any] = ...,
-        expand_wildcards: Optional[Any] = ...,
-        explain: Optional[Any] = ...,
-        from_: Optional[Any] = ...,
-        ignore_throttled: Optional[Any] = ...,
-        ignore_unavailable: Optional[Any] = ...,
-        lenient: Optional[Any] = ...,
-        max_concurrent_shard_requests: Optional[Any] = ...,
-        pre_filter_shard_size: Optional[Any] = ...,
-        preference: Optional[Any] = ...,
-        q: Optional[Any] = ...,
-        request_cache: Optional[Any] = ...,
-        rest_total_hits_as_int: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        scroll: Optional[Any] = ...,
-        search_type: Optional[Any] = ...,
-        seq_no_primary_term: Optional[Any] = ...,
-        size: Optional[Any] = ...,
-        sort: Optional[Any] = ...,
-        stats: Optional[Any] = ...,
-        stored_fields: Optional[Any] = ...,
-        suggest_field: Optional[Any] = ...,
-        suggest_mode: Optional[Any] = ...,
-        suggest_size: Optional[Any] = ...,
-        suggest_text: Optional[Any] = ...,
-        terminate_after: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        track_scores: Optional[Any] = ...,
-        track_total_hits: Optional[Any] = ...,
-        typed_keys: Optional[Any] = ...,
-        version: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def search_shards(
-        self,
-        *,
-        index: Optional[Any] = ...,
-        allow_no_indices: Optional[Any] = ...,
-        expand_wildcards: Optional[Any] = ...,
-        ignore_unavailable: Optional[Any] = ...,
-        local: Optional[Any] = ...,
-        preference: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def search_template(
-        self,
-        *,
-        body: Any,
-        index: Optional[Any] = ...,
-        allow_no_indices: Optional[Any] = ...,
-        ccs_minimize_roundtrips: Optional[Any] = ...,
-        expand_wildcards: Optional[Any] = ...,
-        explain: Optional[Any] = ...,
-        ignore_throttled: Optional[Any] = ...,
-        ignore_unavailable: Optional[Any] = ...,
-        preference: Optional[Any] = ...,
-        profile: Optional[Any] = ...,
-        rest_total_hits_as_int: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        scroll: Optional[Any] = ...,
-        search_type: Optional[Any] = ...,
-        typed_keys: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def termvectors(
-        self,
-        index: Any,
-        *,
-        body: Optional[Any] = ...,
-        id: Optional[Any] = ...,
-        field_statistics: Optional[Any] = ...,
-        fields: Optional[Any] = ...,
-        offsets: Optional[Any] = ...,
-        payloads: Optional[Any] = ...,
-        positions: Optional[Any] = ...,
-        preference: Optional[Any] = ...,
-        realtime: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        term_statistics: Optional[Any] = ...,
-        version: Optional[Any] = ...,
-        version_type: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def update(
-        self,
-        index: Any,
-        id: Any,
-        *,
-        body: Any,
-        _source: Optional[Any] = ...,
-        _source_excludes: Optional[Any] = ...,
-        _source_includes: Optional[Any] = ...,
-        if_primary_term: Optional[Any] = ...,
-        if_seq_no: Optional[Any] = ...,
-        lang: Optional[Any] = ...,
-        refresh: Optional[Any] = ...,
-        require_alias: Optional[Any] = ...,
-        retry_on_conflict: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        wait_for_active_shards: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def update_by_query(
-        self,
-        index: Any,
-        *,
-        body: Optional[Any] = ...,
-        _source: Optional[Any] = ...,
-        _source_excludes: Optional[Any] = ...,
-        _source_includes: Optional[Any] = ...,
-        allow_no_indices: Optional[Any] = ...,
-        analyze_wildcard: Optional[Any] = ...,
-        analyzer: Optional[Any] = ...,
-        conflicts: Optional[Any] = ...,
-        default_operator: Optional[Any] = ...,
-        df: Optional[Any] = ...,
-        expand_wildcards: Optional[Any] = ...,
-        from_: Optional[Any] = ...,
-        ignore_unavailable: Optional[Any] = ...,
-        lenient: Optional[Any] = ...,
-        max_docs: Optional[Any] = ...,
-        pipeline: Optional[Any] = ...,
-        preference: Optional[Any] = ...,
-        q: Optional[Any] = ...,
-        refresh: Optional[Any] = ...,
-        request_cache: Optional[Any] = ...,
-        requests_per_second: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        scroll: Optional[Any] = ...,
-        scroll_size: Optional[Any] = ...,
-        search_timeout: Optional[Any] = ...,
-        search_type: Optional[Any] = ...,
-        size: Optional[Any] = ...,
-        slices: Optional[Any] = ...,
-        sort: Optional[Any] = ...,
-        stats: Optional[Any] = ...,
-        terminate_after: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        version: Optional[Any] = ...,
-        wait_for_active_shards: Optional[Any] = ...,
-        wait_for_completion: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def update_by_query_rethrottle(
-        self,
-        task_id: Any,
-        *,
-        requests_per_second: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def get_script_context(
-        self,
-        *,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def get_script_languages(
-        self,
-        *,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def create_pit(
-        self,
-        index: Any,
-        *,
-        allow_partial_pit_creation: Optional[Any] = ...,
-        expand_wildcards: Optional[Any] = ...,
-        keep_alive: Optional[Any] = ...,
-        preference: Optional[Any] = ...,
-        routing: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def delete_all_pits(
-        self,
-        *,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def delete_pit(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    async def get_all_pits(
-        self,
-        *,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
diff --git a/opensearchpy/_async/client/_patch.py b/opensearchpy/_async/client/_patch.py
index f3a953c0..cbf24e0b 100644
--- a/opensearchpy/_async/client/_patch.py
+++ b/opensearchpy/_async/client/_patch.py
@@ -9,12 +9,15 @@
 # GitHub history for details.
 
 import warnings
+from typing import Any
 
 from .utils import SKIP_IN_PATH, query_params
 
 
 @query_params()
-async def list_all_point_in_time(self, params=None, headers=None):
+async def list_all_point_in_time(
+    self: Any, params: Any = None, headers: Any = None
+) -> Any:
     """
     Returns the list of active point in times searches
 
@@ -35,7 +38,9 @@
 @query_params(
     "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing"
 )
-async def create_point_in_time(self, index, params=None, headers=None):
+async def create_point_in_time(
+    self: Any, index: Any, params: Any = None, headers: Any = None
+) -> Any:
     """
     Create a point in time that can be used in subsequent searches
 
@@ -68,7 +73,13 @@
 @query_params()
-async def delete_point_in_time(self, body=None, all=False, params=None, headers=None):
+async def delete_point_in_time(
+    self: Any,
+    body: Any = None,
+    all: bool = False,
+    params: Any = None,
+    headers: Any = None,
+) -> Any:
     """
     Delete a point in time
 
@@ -94,7 +105,7 @@
 @query_params()
-async def health_check(self, params=None, headers=None):
+async def health_check(self: Any, params: Any = None, headers: Any = None) -> Any:
     """
     Checks to see if the Security plugin is up and running.
 
@@ -113,7 +124,9 @@
 @query_params()
-async def update_audit_config(self, body, params=None, headers=None):
+async def update_audit_config(
+    self: Any, body: Any, params: Any = None, headers: Any = None
+) -> Any:
     """
     A PUT call updates the audit configuration.
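The annotations above change nothing at runtime; the patched helpers still behave as before once they are attached to the client. As a minimal usage sketch (not part of the patch: the host, credentials, index name, and `keep_alive` value are assumptions, and it presumes the helpers are attached to `AsyncOpenSearch` as in the released client):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    # Hypothetical local cluster; host and credentials are assumptions.
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"], verify_certs=False
    )
    # create_point_in_time() takes an index plus the query params declared
    # above (expand_wildcards, keep_alive, preference, routing, ...).
    pit = await client.create_point_in_time(index="movies", keep_alive="1m")
    print(pit)
    # all=True routes the call to the delete-all-PITs endpoint instead of
    # sending a body with specific PIT IDs.
    await client.delete_point_in_time(all=True)
    await client.close()


asyncio.run(main())
```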
diff --git a/opensearchpy/_async/client/_patch.pyi b/opensearchpy/_async/client/_patch.pyi
deleted file mode 100644
index d49a7fec..00000000
--- a/opensearchpy/_async/client/_patch.pyi
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-
-from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union
-
-async def list_all_point_in_time(
-    *,
-    pretty: Optional[bool] = ...,
-    human: Optional[bool] = ...,
-    error_trace: Optional[bool] = ...,
-    format: Optional[str] = ...,
-    filter_path: Optional[Union[str, Collection[str]]] = ...,
-    request_timeout: Optional[Union[int, float]] = ...,
-    ignore: Optional[Union[int, Collection[int]]] = ...,
-    opaque_id: Optional[str] = ...,
-    http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-    api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-    params: Optional[MutableMapping[str, Any]] = ...,
-    headers: Optional[MutableMapping[str, str]] = ...,
-) -> Any: ...
-async def create_point_in_time(
-    *,
-    index: Optional[Any] = ...,
-    expand_wildcards: Optional[Any] = ...,
-    ignore_unavailable: Optional[Any] = ...,
-    keep_alive: Optional[Any] = ...,
-    preference: Optional[Any] = ...,
-    routing: Optional[Any] = ...,
-    pretty: Optional[bool] = ...,
-    human: Optional[bool] = ...,
-    error_trace: Optional[bool] = ...,
-    format: Optional[str] = ...,
-    filter_path: Optional[Union[str, Collection[str]]] = ...,
-    request_timeout: Optional[Union[int, float]] = ...,
-    ignore: Optional[Union[int, Collection[int]]] = ...,
-    opaque_id: Optional[str] = ...,
-    http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-    api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-    params: Optional[MutableMapping[str, Any]] = ...,
-    headers: Optional[MutableMapping[str, str]] = ...,
-) -> Any: ...
-async def delete_point_in_time(
-    *,
-    body: Optional[Any] = ...,
-    all: Optional[bool] = ...,
-    pretty: Optional[bool] = ...,
-    human: Optional[bool] = ...,
-    error_trace: Optional[bool] = ...,
-    format: Optional[str] = ...,
-    filter_path: Optional[Union[str, Collection[str]]] = ...,
-    request_timeout: Optional[Union[int, float]] = ...,
-    ignore: Optional[Union[int, Collection[int]]] = ...,
-    opaque_id: Optional[str] = ...,
-    http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-    api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-    params: Optional[MutableMapping[str, Any]] = ...,
-    headers: Optional[MutableMapping[str, str]] = ...,
-) -> Any: ...
-async def health_check(
-    params: Union[Any, None] = ..., headers: Union[Any, None] = ...
-) -> Union[bool, Any]: ...
-async def update_audit_config(
-    body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ...
-) -> Union[bool, Any]: ...
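With the stub file gone, type checkers resolve these helpers from the inline annotations in `_patch.py` itself. A hypothetical check, illustrative only (the file and function names are not part of the patch), would type-check cleanly under mypy:

```python
# check_pit_types.py -- illustrative sketch, not part of the patch.
from typing import Any

from opensearchpy import AsyncOpenSearch


async def list_pits(client: AsyncOpenSearch) -> Any:
    # mypy now resolves this call against the inline `-> Any` annotation
    # added to _patch.py above, with no .pyi stub involved.
    return await client.list_all_point_in_time()
```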
diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py
index 2c2b01c0..4310511c 100644
--- a/opensearchpy/_async/client/cat.py
+++ b/opensearchpy/_async/client/cat.py
@@ -36,12 +36,19 @@
 # -----------------------------------------------------
 
+from typing import Any
+
 from .utils import NamespacedClient, _make_path, query_params
 
 
 class CatClient(NamespacedClient):
     @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v")
-    async def aliases(self, name=None, params=None, headers=None):
+    async def aliases(
+        self,
+        name: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Shows information about currently configured aliases to indices including
         filter and routing infos.
@@ -65,6 +72,20 @@ async def aliases(self, name=None, params=None, headers=None):
             "GET", _make_path("_cat", "aliases", name), params=params, headers=headers
         )
 
+    @query_params()
+    async def all_pit_segments(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
+        """
+        Lists all active point-in-time segments.
+
+        """
+        return await self.transport.perform_request(
+            "GET", "/_cat/pit_segments/_all", params=params, headers=headers
+        )
+
     @query_params(
         "bytes",
         "cluster_manager_timeout",
@@ -76,7 +97,12 @@
         "s",
         "v",
     )
-    async def allocation(self, node_id=None, params=None, headers=None):
+    async def allocation(
+        self,
+        node_id: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Provides a snapshot of how many shards are allocated to each data node
         and how much disk space they are using.
@@ -108,8 +134,51 @@ async def allocation(self, node_id=None, params=None, headers=None):
             headers=headers,
         )
 
+    @query_params(
+        "cluster_manager_timeout",
+        "format",
+        "h",
+        "help",
+        "local",
+        "master_timeout",
+        "s",
+        "v",
+    )
+    async def cluster_manager(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
+        """
+        Returns information about the cluster-manager node.
+
+
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg format: A short version of the Accept header, e.g. json,
+            yaml.
+        :arg h: Comma-separated list of column names to display.
+        :arg help: Return help information. Default is false.
+        :arg local: Return local information, do not retrieve the state
+            from cluster-manager node. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg s: Comma-separated list of column names or column aliases
+            to sort by.
+        :arg v: Verbose mode. Display column headers. Default is false.
+        """
+        return await self.transport.perform_request(
+            "GET", "/_cat/cluster_manager", params=params, headers=headers
+        )
+
     @query_params("format", "h", "help", "s", "v")
-    async def count(self, index=None, params=None, headers=None):
+    async def count(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Provides quick access to the document count of the entire cluster, or
         individual indices.
@@ -129,8 +198,43 @@ async def count(self, index=None, params=None, headers=None):
             "GET", _make_path("_cat", "count", index), params=params, headers=headers
         )
 
+    @query_params("bytes", "format", "h", "help", "s", "v")
+    async def fielddata(
+        self,
+        fields: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
+        """
+        Shows how much heap memory is currently being used by fielddata on every data
+        node in the cluster.
+
+
+        :arg fields: Comma-separated list of fields to return in the
+            output.
+        :arg bytes: The unit in which to display byte values. Valid
+            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
+        :arg format: A short version of the Accept header, e.g. json,
+            yaml.
+        :arg h: Comma-separated list of column names to display.
+        :arg help: Return help information. Default is false.
+        :arg s: Comma-separated list of column names or column aliases
+            to sort by.
+        :arg v: Verbose mode. Display column headers. Default is false.
+        """
+        return await self.transport.perform_request(
+            "GET",
+            _make_path("_cat", "fielddata", fields),
+            params=params,
+            headers=headers,
+        )
+
     @query_params("format", "h", "help", "s", "time", "ts", "v")
-    async def health(self, params=None, headers=None):
+    async def health(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns a concise representation of the cluster health.
 
@@ -151,7 +255,11 @@ async def health(self, params=None, headers=None):
         )
 
     @query_params("help", "s")
-    async def help(self, params=None, headers=None):
+    async def help(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns help for the Cat APIs.
 
@@ -180,7 +288,12 @@ async def help(self, params=None, headers=None):
         "time",
         "v",
     )
-    async def indices(self, index=None, params=None, headers=None):
+    async def indices(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about indices: number of primaries and replicas, document
         counts, disk size, ...
@@ -232,7 +345,11 @@ async def indices(self, index=None, params=None, headers=None):
         "s",
         "v",
     )
-    async def master(self, params=None, headers=None):
+    async def master(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about the cluster-manager node.
 
@@ -271,9 +388,13 @@ async def master(self, params=None, headers=None):
         "s",
         "v",
     )
-    async def cluster_manager(self, params=None, headers=None):
+    async def nodeattrs(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns information about the cluster-manager node.
+        Returns information about custom node attributes.
 
 
         :arg cluster_manager_timeout: Operation timeout for connection
@@ -292,7 +413,7 @@
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET", "/_cat/cluster_manager", params=params, headers=headers
+            "GET", "/_cat/nodeattrs", params=params, headers=headers
         )
 
     @query_params(
@@ -308,7 +429,11 @@
         "time",
         "v",
     )
-    async def nodes(self, params=None, headers=None):
+    async def nodes(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns basic statistics about performance of cluster nodes.
 
@@ -340,37 +465,6 @@ async def nodes(self, params=None, headers=None):
         )
 
     @query_params(
-        "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v"
-    )
-    async def recovery(self, index=None, params=None, headers=None):
-        """
-        Returns information about index shard recoveries, both on-going completed.
-
-
-        :arg index: Comma-separated list or wildcard expression of index
-            names to limit the returned information.
-        :arg active_only: If `true`, the response only includes ongoing
-            shard recoveries. Default is false.
-        :arg bytes: The unit in which to display byte values. Valid
-            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
-        :arg detailed: If `true`, the response includes detailed
-            information about shard recoveries. Default is false.
-        :arg format: A short version of the Accept header, e.g. json,
-            yaml.
-        :arg h: Comma-separated list of column names to display.
-        :arg help: Return help information. Default is false.
-        :arg s: Comma-separated list of column names or column aliases
-            to sort by.
-        :arg time: The unit in which to display time values. Valid
-            choices are d, h, m, s, ms, micros, nanos.
-        :arg v: Verbose mode. Display column headers. Default is false.
-        """
-        return await self.transport.perform_request(
-            "GET", _make_path("_cat", "recovery", index), params=params, headers=headers
-        )
-
-    @query_params(
-        "bytes",
         "cluster_manager_timeout",
         "format",
         "h",
@@ -381,15 +475,15 @@
         "time",
         "v",
     )
-    async def shards(self, index=None, params=None, headers=None):
+    async def pending_tasks(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Provides a detailed view of shard allocation on nodes.
+        Returns a concise representation of the cluster pending tasks.
 
-        :arg index: Comma-separated list of indices to limit the
-            returned information.
-        :arg bytes: The unit in which to display byte values. Valid
-            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
         :arg cluster_manager_timeout: Operation timeout for connection
             to cluster-manager node.
         :arg format: A short version of the Accept header, e.g. json,
@@ -408,34 +502,52 @@
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET", _make_path("_cat", "shards", index), params=params, headers=headers
+            "GET", "/_cat/pending_tasks", params=params, headers=headers
+        )
+
+    @query_params()
+    async def pit_segments(
+        self,
+        body: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
+        """
+        List segments for one or several PITs.
+
+
+        """
+        return await self.transport.perform_request(
+            "GET", "/_cat/pit_segments", params=params, headers=headers, body=body
         )
 
     @query_params(
-        "bytes",
         "cluster_manager_timeout",
         "format",
         "h",
         "help",
+        "local",
         "master_timeout",
         "s",
         "v",
     )
-    async def segments(self, index=None, params=None, headers=None):
+    async def plugins(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Provides low-level information about the segments in the shards of an index.
+        Returns information about installed plugins across nodes node.
 
-        :arg index: Comma-separated list of indices to limit the
-            returned information.
-        :arg bytes: The unit in which to display byte values. Valid
-            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
         :arg cluster_manager_timeout: Operation timeout for connection
            to cluster-manager node.
         :arg format: A short version of the Accept header, e.g. json,
            yaml.
         :arg h: Comma-separated list of column names to display.
         :arg help: Return help information. Default is false.
+        :arg local: Return local information, do not retrieve the state
+            from cluster-manager node. Default is false.
         :arg master_timeout (Deprecated: To promote inclusive language,
            use 'cluster_manager_timeout' instead.): Operation timeout for
            connection to master node.
@@ -444,36 +556,34 @@
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET", _make_path("_cat", "segments", index), params=params, headers=headers
+            "GET", "/_cat/plugins", params=params, headers=headers
         )
 
     @query_params(
-        "cluster_manager_timeout",
-        "format",
-        "h",
-        "help",
-        "local",
-        "master_timeout",
-        "s",
-        "time",
-        "v",
+        "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v"
     )
-    async def pending_tasks(self, params=None, headers=None):
+    async def recovery(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns a concise representation of the cluster pending tasks.
+        Returns information about index shard recoveries, both on-going completed.
 
-        :arg cluster_manager_timeout: Operation timeout for connection
-            to cluster-manager node.
+        :arg index: Comma-separated list or wildcard expression of index
+            names to limit the returned information.
+        :arg active_only: If `true`, the response only includes ongoing
+            shard recoveries. Default is false.
+        :arg bytes: The unit in which to display byte values. Valid
+            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
+        :arg detailed: If `true`, the response includes detailed
+            information about shard recoveries. Default is false.
         :arg format: A short version of the Accept header, e.g. json,
            yaml.
         :arg h: Comma-separated list of column names to display.
         :arg help: Return help information. Default is false.
-        :arg local: Return local information, do not retrieve the state
-            from cluster-manager node. Default is false.
-        :arg master_timeout (Deprecated: To promote inclusive language,
-            use 'cluster_manager_timeout' instead.): Operation timeout for
-            connection to master node.
         :arg s: Comma-separated list of column names or column aliases
            to sort by.
         :arg time: The unit in which to display time values. Valid
@@ -481,7 +591,7 @@
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET", "/_cat/pending_tasks", params=params, headers=headers
+            "GET", _make_path("_cat", "recovery", index), params=params, headers=headers
         )
 
     @query_params(
@@ -492,17 +602,17 @@
         "local",
         "master_timeout",
         "s",
-        "size",
         "v",
     )
-    async def thread_pool(self, thread_pool_patterns=None, params=None, headers=None):
+    async def repositories(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns cluster-wide thread pool statistics per node. By default the active,
-        queue and rejected statistics are returned for all thread pools.
+        Returns information about snapshot repositories registered in the cluster.
 
-        :arg thread_pool_patterns: Comma-separated list of regular-
-            expressions to filter the thread pools in the output.
         :arg cluster_manager_timeout: Operation timeout for connection
            to cluster-manager node.
         :arg format: A short version of the Accept header, e.g. json,
@@ -516,65 +626,94 @@
            connection to master node.
         :arg s: Comma-separated list of column names or column aliases
            to sort by.
-        :arg size: The multiplier in which to display values.
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET",
-            _make_path("_cat", "thread_pool", thread_pool_patterns),
-            params=params,
-            headers=headers,
+            "GET", "/_cat/repositories", params=params, headers=headers
         )
 
-    @query_params("bytes", "format", "h", "help", "s", "v")
-    async def fielddata(self, fields=None, params=None, headers=None):
+    @query_params(
+        "active_only",
+        "bytes",
+        "completed_only",
+        "detailed",
+        "format",
+        "h",
+        "help",
+        "s",
+        "shards",
+        "time",
+        "v",
+    )
+    async def segment_replication(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Shows how much heap memory is currently being used by fielddata on every data
-        node in the cluster.
+        Returns information about both on-going and latest completed Segment
+        Replication events.
 
-        :arg fields: Comma-separated list of fields to return in the
-            output.
+        :arg index: Comma-separated list or wildcard expression of index
+            names to limit the returned information.
+        :arg active_only: If `true`, the response only includes ongoing
+            segment replication events. Default is false.
         :arg bytes: The unit in which to display byte values. Valid
            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
+        :arg completed_only: If `true`, the response only includes
+            latest completed segment replication events. Default is false.
+        :arg detailed: If `true`, the response includes detailed
+            information about segment replications. Default is false.
         :arg format: A short version of the Accept header, e.g. json,
            yaml.
         :arg h: Comma-separated list of column names to display.
         :arg help: Return help information. Default is false.
         :arg s: Comma-separated list of column names or column aliases
            to sort by.
+        :arg shards: Comma-separated list of shards to display.
+        :arg time: The unit in which to display time values. Valid
+            choices are d, h, m, s, ms, micros, nanos.
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
             "GET",
-            _make_path("_cat", "fielddata", fields),
+            _make_path("_cat", "segment_replication", index),
             params=params,
             headers=headers,
         )
 
     @query_params(
+        "bytes",
         "cluster_manager_timeout",
         "format",
         "h",
         "help",
-        "local",
         "master_timeout",
         "s",
         "v",
     )
-    async def plugins(self, params=None, headers=None):
+    async def segments(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns information about installed plugins across nodes node.
+        Provides low-level information about the segments in the shards of an index.
 
+        :arg index: Comma-separated list of indices to limit the
+            returned information.
+        :arg bytes: The unit in which to display byte values. Valid
+            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
         :arg cluster_manager_timeout: Operation timeout for connection
            to cluster-manager node.
         :arg format: A short version of the Accept header, e.g. json,
            yaml.
         :arg h: Comma-separated list of column names to display.
         :arg help: Return help information. Default is false.
-        :arg local: Return local information, do not retrieve the state
-            from cluster-manager node. Default is false.
@@ -583,10 +722,11 @@
         :arg master_timeout (Deprecated: To promote inclusive language,
            use 'cluster_manager_timeout' instead.): Operation timeout for
            connection to master node.
         :arg s: Comma-separated list of column names or column aliases
            to sort by.
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET", "/_cat/plugins", params=params, headers=headers
+            "GET", _make_path("_cat", "segments", index), params=params, headers=headers
         )
 
     @query_params(
+        "bytes",
         "cluster_manager_timeout",
         "format",
         "h",
@@ -594,13 +734,23 @@
         "local",
         "master_timeout",
         "s",
+        "time",
         "v",
     )
-    async def nodeattrs(self, params=None, headers=None):
+    async def shards(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns information about custom node attributes.
+        Provides a detailed view of shard allocation on nodes.
 
+        :arg index: Comma-separated list of indices to limit the
+            returned information.
+        :arg bytes: The unit in which to display byte values. Valid
+            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
         :arg cluster_manager_timeout: Operation timeout for connection
            to cluster-manager node.
         :arg format: A short version of the Accept header, e.g. json,
            yaml.
         :arg h: Comma-separated list of column names to display.
         :arg help: Return help information. Default is false.
@@ -614,10 +764,12 @@
            connection to master node.
         :arg s: Comma-separated list of column names or column aliases
            to sort by.
+        :arg time: The unit in which to display time values. Valid
+            choices are d, h, m, s, ms, micros, nanos.
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET", "/_cat/nodeattrs", params=params, headers=headers
+            "GET", _make_path("_cat", "shards", index), params=params, headers=headers
         )
 
     @query_params(
@@ -628,13 +780,22 @@
         "local",
         "master_timeout",
         "s",
+        "size",
         "v",
     )
-    async def repositories(self, params=None, headers=None):
+    async def thread_pool(
+        self,
+        thread_pool_patterns: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns information about snapshot repositories registered in the cluster.
+        Returns cluster-wide thread pool statistics per node. By default the active,
+        queue and rejected statistics are returned for all thread pools.
 
+        :arg thread_pool_patterns: Comma-separated list of regular-
+            expressions to filter the thread pools in the output.
         :arg cluster_manager_timeout: Operation timeout for connection
            to cluster-manager node.
         :arg format: A short version of the Accept header, e.g. json,
@@ -648,10 +809,14 @@
            connection to master node.
         :arg s: Comma-separated list of column names or column aliases
            to sort by.
+        :arg size: The multiplier in which to display values.
         :arg v: Verbose mode. Display column headers. Default is false.
         """
         return await self.transport.perform_request(
-            "GET", "/_cat/repositories", params=params, headers=headers
+            "GET",
+            _make_path("_cat", "thread_pool", thread_pool_patterns),
+            params=params,
+            headers=headers,
         )
 
     @query_params(
@@ -665,7 +830,12 @@
         "time",
         "v",
     )
-    async def snapshots(self, repository=None, params=None, headers=None):
+    async def snapshots(
+        self,
+        repository: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns all snapshots in a specific repository.
 
@@ -708,7 +878,11 @@ async def snapshots(self, repository=None, params=None, headers=None):
         "time",
         "v",
     )
-    async def tasks(self, params=None, headers=None):
+    async def tasks(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about the tasks currently executing on one or more nodes in
         the cluster.
@@ -748,7 +922,12 @@ async def tasks(self, params=None, headers=None):
         "s",
         "v",
     )
-    async def templates(self, name=None, params=None, headers=None):
+    async def templates(
+        self,
+        name: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about existing templates.
 
@@ -772,71 +951,3 @@ async def templates(self, name=None, params=None, headers=None):
         return await self.transport.perform_request(
             "GET", _make_path("_cat", "templates", name), params=params, headers=headers
         )
-
-    @query_params()
-    async def all_pit_segments(self, params=None, headers=None):
-        """
-        Lists all active point-in-time segments.
-
-        """
-        return await self.transport.perform_request(
-            "GET", "/_cat/pit_segments/_all", params=params, headers=headers
-        )
-
-    @query_params()
-    async def pit_segments(self, body=None, params=None, headers=None):
-        """
-        List segments for one or several PITs.
-
-
-        """
-        return await self.transport.perform_request(
-            "GET", "/_cat/pit_segments", params=params, headers=headers, body=body
-        )
-
-    @query_params(
-        "active_only",
-        "bytes",
-        "completed_only",
-        "detailed",
-        "format",
-        "h",
-        "help",
-        "s",
-        "shards",
-        "time",
-        "v",
-    )
-    async def segment_replication(self, index=None, params=None, headers=None):
-        """
-        Returns information about both on-going and latest completed Segment
-        Replication events.
-
-
-        :arg index: Comma-separated list or wildcard expression of index
-            names to limit the returned information.
-        :arg active_only: If `true`, the response only includes ongoing
-            segment replication events. Default is false.
-        :arg bytes: The unit in which to display byte values. Valid
-            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
-        :arg completed_only: If `true`, the response only includes
-            latest completed segment replication events. Default is false.
-        :arg detailed: If `true`, the response includes detailed
-            information about segment replications. Default is false.
-        :arg format: A short version of the Accept header, e.g. json,
-            yaml.
-        :arg h: Comma-separated list of column names to display.
-        :arg help: Return help information. Default is false.
-        :arg s: Comma-separated list of column names or column aliases
-            to sort by.
-        :arg shards: Comma-separated list of shards to display.
-        :arg time: The unit in which to display time values. Valid
-            choices are d, h, m, s, ms, micros, nanos.
-        :arg v: Verbose mode. Display column headers. Default is false.
-        """
-        return await self.transport.perform_request(
-            "GET",
-            _make_path("_cat", "segment_replication", index),
-            params=params,
-            headers=headers,
-        )
diff --git a/opensearchpy/_async/client/cat.pyi b/opensearchpy/_async/client/cat.pyi
deleted file mode 100644
index 404400cd..00000000
--- a/opensearchpy/_async/client/cat.pyi
+++ /dev/null
@@ -1,601 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-#
-# Licensed to Elasticsearch B.V. under one or more contributor
under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class CatClient(NamespacedClient): - async def aliases( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def allocation( - self, - *, - node_id: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def count( - self, - *, - index: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def health( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - ts: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def help( - self, - *, - help: Optional[Any] = ..., - s: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def indices( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - health: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pri: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def master( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cluster_manager( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def nodes( - self, - *, - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - full_id: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def shards( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def segments( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def thread_pool( - self, - *, - thread_pool_patterns: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - size: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def fielddata( - self, - *, - fields: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def plugins( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def nodeattrs( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def repositories( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def snapshots( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def tasks( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def templates( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def all_pit_segments( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
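With these cat-client stubs removed, the signatures now live inline on the methods themselves and editors pick them up directly from `cat.py`. As an illustrative aside, not part of the diff, a minimal sketch of calling a couple of the affected cat methods; it assumes a reachable local cluster with the demo `admin:admin` credentials and the async extras installed (`pip install opensearch-py[async]`):

```python
# Illustrative only; assumes a local demo cluster and opensearch-py[async].
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"],
        use_ssl=True,
        verify_certs=False,
    )
    # The annotated methods keep their keyword-style query params.
    print(await client.cat.shards(format="json"))
    print(await client.cat.thread_pool(format="json"))
    await client.close()


asyncio.run(main())
```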
- async def pit_segments( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def segment_replication( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - completed_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - shards: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/client.py b/opensearchpy/_async/client/client.py new file mode 100644 index 00000000..7f0b67c6 --- /dev/null +++ b/opensearchpy/_async/client/client.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Optional, Type + +from opensearchpy.client.utils import _normalize_hosts +from opensearchpy.transport import Transport + + +class Client(object): + """ + A generic async OpenSearch client. + """ + + def __init__( + self, + hosts: Optional[str] = None, + transport_class: Type[Transport] = Transport, + **kwargs: Any + ) -> None: + """ + :arg hosts: list of nodes, or a single node, we should connect to. + Node should be a dictionary ({"host": "localhost", "port": 9200}), + the entire dictionary will be passed to the :class:`~opensearchpy.Connection` + class as kwargs, or a string in the format of ``host[:port]`` which will be + translated to a dictionary automatically. If no value is given the + :class:`~opensearchpy.Connection` class defaults will be used. + + :arg transport_class: :class:`~opensearchpy.Transport` subclass to use. + + :arg kwargs: any additional arguments will be passed on to the + :class:`~opensearchpy.Transport` class and, subsequently, to the + :class:`~opensearchpy.Connection` instances. 
+ """ + self.transport = transport_class(_normalize_hosts(hosts), **kwargs) diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 8bd55390..905853e9 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -55,7 +57,12 @@ class ClusterClient(NamespacedClient): "wait_for_nodes", "wait_for_status", ) - async def health(self, index=None, params=None, headers=None): + async def health( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the health of the cluster. @@ -99,7 +106,11 @@ async def health(self, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def pending_tasks(self, params=None, headers=None): + async def pending_tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. @@ -128,7 +139,13 @@ async def pending_tasks(self, params=None, headers=None): "wait_for_metadata_version", "wait_for_timeout", ) - async def state(self, metric=None, index=None, params=None, headers=None): + async def state( + self, + metric: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a comprehensive information about the state of the cluster. @@ -171,7 +188,12 @@ async def state(self, metric=None, index=None, params=None, headers=None): ) @query_params("flat_settings", "timeout") - async def stats(self, node_id=None, params=None, headers=None): + async def stats( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns high-level overview of cluster statistics. @@ -202,7 +224,12 @@ async def stats(self, node_id=None, params=None, headers=None): "retry_failed", "timeout", ) - async def reroute(self, body=None, params=None, headers=None): + async def reroute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to manually change the allocation of individual shards in the cluster. @@ -235,7 +262,11 @@ async def reroute(self, body=None, params=None, headers=None): "master_timeout", "timeout", ) - async def get_settings(self, params=None, headers=None): + async def get_settings( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns cluster settings. @@ -258,7 +289,12 @@ async def get_settings(self, params=None, headers=None): @query_params( "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout" ) - async def put_settings(self, body, params=None, headers=None): + async def put_settings( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the cluster settings. @@ -282,7 +318,11 @@ async def put_settings(self, body, params=None, headers=None): ) @query_params() - async def remote_info(self, params=None, headers=None): + async def remote_info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about configured remote clusters. 
@@ -292,7 +332,12 @@ async def remote_info(self, params=None, headers=None): ) @query_params("include_disk_info", "include_yes_decisions") - async def allocation_explain(self, body=None, params=None, headers=None): + async def allocation_explain( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides explanations for shard allocations in the cluster. @@ -313,7 +358,12 @@ async def allocation_explain(self, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_component_template(self, name, params=None, headers=None): + async def delete_component_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a component template. @@ -337,7 +387,12 @@ async def delete_component_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def get_component_template(self, name=None, params=None, headers=None): + async def get_component_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns one or more component templates. @@ -359,7 +414,13 @@ async def get_component_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout") - async def put_component_template(self, name, body, params=None, headers=None): + async def put_component_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a component template. @@ -388,7 +449,12 @@ async def put_component_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def exists_component_template(self, name, params=None, headers=None): + async def exists_component_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular component template exist. @@ -413,7 +479,11 @@ async def exists_component_template(self, name, params=None, headers=None): ) @query_params("wait_for_removal") - async def delete_voting_config_exclusions(self, params=None, headers=None): + async def delete_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears cluster voting config exclusions. @@ -430,7 +500,11 @@ async def delete_voting_config_exclusions(self, params=None, headers=None): ) @query_params("node_ids", "node_names", "timeout") - async def post_voting_config_exclusions(self, params=None, headers=None): + async def post_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. @@ -448,7 +522,11 @@ async def post_voting_config_exclusions(self, params=None, headers=None): ) @query_params() - async def delete_decommission_awareness(self, params=None, headers=None): + async def delete_decommission_awareness( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete any existing decommission. @@ -461,7 +539,11 @@ async def delete_decommission_awareness(self, params=None, headers=None): ) @query_params() - async def delete_weighted_routing(self, params=None, headers=None): + async def delete_weighted_routing( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete weighted shard routing weights. 
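The decommission-awareness signatures annotated above take the attribute name and value as positional-style arguments. A sketch under stated assumptions: `zone`/`zone-a` are placeholder values, and the calls only succeed on a cluster actually configured with that awareness attribute.

```python
# Hypothetical awareness attribute ("zone") and value ("zone-a").
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    await client.cluster.put_decommission_awareness(
        awareness_attribute_name="zone",
        awareness_attribute_value="zone-a",
    )
    print(
        await client.cluster.get_decommission_awareness(
            awareness_attribute_name="zone"
        )
    )
    await client.cluster.delete_decommission_awareness()
    await client.close()


asyncio.run(main())
```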
@@ -475,8 +557,11 @@ async def delete_weighted_routing(self, params=None, headers=None): @query_params() async def get_decommission_awareness( - self, awareness_attribute_name, params=None, headers=None - ): + self, + awareness_attribute_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Get details and status of decommissioned attribute. @@ -502,7 +587,12 @@ async def get_decommission_awareness( ) @query_params() - async def get_weighted_routing(self, attribute, params=None, headers=None): + async def get_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Fetches weighted shard routing weights. @@ -522,11 +612,11 @@ async def get_weighted_routing(self, attribute, params=None, headers=None): @query_params() async def put_decommission_awareness( self, - awareness_attribute_name, - awareness_attribute_value, - params=None, - headers=None, - ): + awareness_attribute_name: Any, + awareness_attribute_value: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Decommissions an awareness attribute. @@ -552,7 +642,12 @@ async def put_decommission_awareness( ) @query_params() - async def put_weighted_routing(self, attribute, params=None, headers=None): + async def put_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates weighted shard routing weights. diff --git a/opensearchpy/_async/client/cluster.pyi b/opensearchpy/_async/client/cluster.pyi deleted file mode 100644 index 74f88694..00000000 --- a/opensearchpy/_async/client/cluster.pyi +++ /dev/null @@ -1,456 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class ClusterClient(NamespacedClient): - async def health( - self, - *, - index: Optional[Any] = ..., - awareness_attribute: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - level: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_events: Optional[Any] = ..., - wait_for_no_initializing_shards: Optional[Any] = ..., - wait_for_no_relocating_shards: Optional[Any] = ..., - wait_for_nodes: Optional[Any] = ..., - wait_for_status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def state( - self, - *, - metric: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_metadata_version: Optional[Any] = ..., - wait_for_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def stats( - self, - *, - node_id: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reroute( - self, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - explain: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - metric: Optional[Any] = ..., - retry_failed: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_settings( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_settings( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def remote_info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def allocation_explain( - self, - *, - body: Optional[Any] = ..., - include_disk_info: Optional[Any] = ..., - include_yes_decisions: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_component_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_component_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def delete_voting_config_exclusions( - self, - *, - wait_for_removal: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def post_voting_config_exclusions( - self, - *, - node_ids: Optional[Any] = ..., - node_names: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_decommission_awareness( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_weighted_routing( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_decommission_awareness( - self, - awareness_attribute_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_decommission_awareness( - self, - awareness_attribute_name: Any, - awareness_attribute_value: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index bc886d65..6bc9a343 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -43,7 +45,12 @@ class DanglingIndicesClient(NamespacedClient): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - async def delete_dangling_index(self, index_uuid, params=None, headers=None): + async def delete_dangling_index( + self, + index_uuid: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes the specified dangling index. @@ -71,7 +78,12 @@ async def delete_dangling_index(self, index_uuid, params=None, headers=None): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - async def import_dangling_index(self, index_uuid, params=None, headers=None): + async def import_dangling_index( + self, + index_uuid: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Imports the specified dangling index. @@ -94,7 +106,11 @@ async def import_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params() - async def list_dangling_indices(self, params=None, headers=None): + async def list_dangling_indices( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all dangling indices. diff --git a/opensearchpy/_async/client/dangling_indices.pyi b/opensearchpy/_async/client/dangling_indices.pyi deleted file mode 100644 index d9dea8a1..00000000 --- a/opensearchpy/_async/client/dangling_indices.pyi +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
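The dangling-indices methods annotated above form a small workflow: list, then import or delete with explicit consent to data loss. A minimal sketch, assuming the usual `dangling_indices`/`index_uuid` response shape of the `_dangling` API; the UUID comes from the list call rather than being hard-coded:

```python
# Sketch of the annotated dangling-indices workflow; illustrative only.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    dangling = await client.dangling_indices.list_dangling_indices()
    for entry in dangling.get("dangling_indices", []):
        # Importing (or deleting) requires explicitly accepting data loss.
        await client.dangling_indices.import_dangling_index(
            index_uuid=entry["index_uuid"], accept_data_loss=True
        )
    await client.close()


asyncio.run(main())
```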
- -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class DanglingIndicesClient(NamespacedClient): - async def delete_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def import_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def list_dangling_indices( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/features.py b/opensearchpy/_async/client/features.py index e2c1bb7d..1b69aa04 100644 --- a/opensearchpy/_async/client/features.py +++ b/opensearchpy/_async/client/features.py @@ -26,12 +26,14 @@ # under the License. 
+from typing import Any + from .utils import NamespacedClient, query_params class FeaturesClient(NamespacedClient): @query_params("master_timeout", "cluster_manager_timeout") - async def get_features(self, params=None, headers=None): + async def get_features(self, params: Any = None, headers: Any = None) -> Any: """ Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot @@ -47,7 +49,7 @@ async def get_features(self, params=None, headers=None): ) @query_params() - async def reset_features(self, params=None, headers=None): + async def reset_features(self, params: Any = None, headers: Any = None) -> Any: """ Resets the internal state of features, usually by deleting system indices diff --git a/opensearchpy/_async/client/features.pyi b/opensearchpy/_async/client/features.pyi deleted file mode 100644 index 38fb992e..00000000 --- a/opensearchpy/_async/client/features.pyi +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class FeaturesClient(NamespacedClient): - async def get_features( - self, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reset_features( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
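Before moving on to the indices client, a quick sketch of the two `FeaturesClient` calls typed above (illustrative, not part of the diff):

```python
# Minimal sketch of the annotated FeaturesClient methods.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    # Lists features whose state can be captured via the feature_states
    # field when creating a snapshot.
    print(await client.features.get_features())
    await client.close()


asyncio.run(main())
```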
diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index b83cb73c..a4ef8b5b 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -36,12 +36,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndicesClient(NamespacedClient): @query_params() - async def analyze(self, body=None, index=None, params=None, headers=None): + async def analyze( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the analysis process on a text and return the tokens breakdown of the text. @@ -60,7 +68,12 @@ async def analyze(self, body=None, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def refresh(self, index=None, params=None, headers=None): + async def refresh( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the refresh operation in one or more indices. @@ -87,7 +100,12 @@ async def refresh(self, index=None, params=None, headers=None): "ignore_unavailable", "wait_if_ongoing", ) - async def flush(self, index=None, params=None, headers=None): + async def flush( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the flush operation on one or more indices. @@ -119,7 +137,13 @@ async def flush(self, index=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - async def create(self, index, body=None, params=None, headers=None): + async def create( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates an index with optional settings and mappings. @@ -146,7 +170,14 @@ async def create(self, index, body=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - async def clone(self, index, target, body=None, params=None, headers=None): + async def clone( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clones an index. @@ -186,7 +217,12 @@ async def clone(self, index, target, body=None, params=None, headers=None): "local", "master_timeout", ) - async def get(self, index, params=None, headers=None): + async def get( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about one or more indices. @@ -229,7 +265,12 @@ async def get(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def open(self, index, params=None, headers=None): + async def open( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Opens an index. @@ -268,7 +309,12 @@ async def open(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def close(self, index, params=None, headers=None): + async def close( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Closes an index. @@ -306,7 +352,12 @@ async def close(self, index, params=None, headers=None): "master_timeout", "timeout", ) - async def delete(self, index, params=None, headers=None): + async def delete( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index. 
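The lifecycle methods annotated so far (`create`, `open`, `close`, `exists`, `delete`) chain together naturally. A self-contained sketch, with `movies` as an arbitrary example index:

```python
# Index lifecycle sketch using the annotated IndicesClient methods.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    await client.indices.create(
        index="movies", body={"settings": {"index": {"number_of_shards": 1}}}
    )
    await client.indices.close(index="movies")
    await client.indices.open(index="movies")
    print(await client.indices.exists(index="movies"))
    await client.indices.delete(index="movies")
    await client.close()


asyncio.run(main())
```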
@@ -344,7 +395,12 @@ async def delete(self, index, params=None, headers=None): "include_defaults", "local", ) - async def exists(self, index, params=None, headers=None): + async def exists( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index exists. @@ -382,7 +438,13 @@ async def exists(self, index, params=None, headers=None): "timeout", "write_index_only", ) - async def put_mapping(self, body, index=None, params=None, headers=None): + async def put_mapping( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index mappings. @@ -429,7 +491,12 @@ async def put_mapping(self, body, index=None, params=None, headers=None): "local", "master_timeout", ) - async def get_mapping(self, index=None, params=None, headers=None): + async def get_mapping( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mappings for one or more indices. @@ -463,7 +530,13 @@ async def get_mapping(self, index=None, params=None, headers=None): "include_defaults", "local", ) - async def get_field_mapping(self, fields, index=None, params=None, headers=None): + async def get_field_mapping( + self, + fields: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mapping for one or more fields. @@ -494,7 +567,14 @@ async def get_field_mapping(self, fields, index=None, params=None, headers=None) ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def put_alias(self, index, name, body=None, params=None, headers=None): + async def put_alias( + self, + index: Any, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an alias. @@ -524,7 +604,13 @@ async def put_alias(self, index, name, body=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - async def exists_alias(self, name, index=None, params=None, headers=None): + async def exists_alias( + self, + name: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular alias exists. @@ -550,7 +636,13 @@ async def exists_alias(self, name, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - async def get_alias(self, index=None, name=None, params=None, headers=None): + async def get_alias( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an alias. @@ -573,7 +665,12 @@ async def get_alias(self, index=None, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def update_aliases(self, body, params=None, headers=None): + async def update_aliases( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates index aliases. @@ -594,7 +691,13 @@ async def update_aliases(self, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_alias(self, index, name, params=None, headers=None): + async def delete_alias( + self, + index: Any, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an alias. 
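The mapping and alias methods above pair up the same way. A sketch with illustrative field and alias names:

```python
# Mapping and alias sketch; "movies", "title", and "films" are examples.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    await client.indices.create(index="movies")
    await client.indices.put_mapping(
        index="movies", body={"properties": {"title": {"type": "text"}}}
    )
    await client.indices.put_alias(index="movies", name="films")
    print(await client.indices.exists_alias(name="films"))
    await client.indices.delete_alias(index="movies", name="films")
    await client.indices.delete(index="movies")
    await client.close()


asyncio.run(main())
```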
@@ -619,7 +722,13 @@ async def delete_alias(self, index, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "order") - async def put_template(self, name, body, params=None, headers=None): + async def put_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. @@ -650,7 +759,12 @@ async def put_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def exists_template(self, name, params=None, headers=None): + async def exists_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. @@ -674,7 +788,12 @@ async def exists_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def get_template(self, name=None, params=None, headers=None): + async def get_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. @@ -695,7 +814,12 @@ async def get_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_template(self, name, params=None, headers=None): + async def delete_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. @@ -725,7 +849,13 @@ async def delete_template(self, name, params=None, headers=None): "local", "master_timeout", ) - async def get_settings(self, index=None, name=None, params=None, headers=None): + async def get_settings( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns settings for one or more indices. @@ -767,7 +897,13 @@ async def get_settings(self, index=None, name=None, params=None, headers=None): "preserve_existing", "timeout", ) - async def put_settings(self, body, index=None, params=None, headers=None): + async def put_settings( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index settings. @@ -817,7 +953,13 @@ async def put_settings(self, body, index=None, params=None, headers=None): "include_unloaded_segments", "level", ) - async def stats(self, index=None, metric=None, params=None, headers=None): + async def stats( + self, + index: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in an index. @@ -858,7 +1000,12 @@ async def stats(self, index=None, metric=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose" ) - async def segments(self, index=None, params=None, headers=None): + async def segments( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides low-level information about segments in a Lucene index. 
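For the legacy template, settings, and stats methods annotated above, a sketch with example names and patterns:

```python
# Legacy index template plus per-index settings; names are illustrative.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    await client.indices.put_template(
        name="movies-template",
        body={"index_patterns": ["movies-*"], "settings": {"number_of_shards": 1}},
    )
    await client.indices.create(index="movies-2023")
    await client.indices.put_settings(
        index="movies-2023", body={"index": {"refresh_interval": "30s"}}
    )
    print(await client.indices.stats(index="movies-2023", metric="docs"))
    await client.indices.delete(index="movies-2023")
    await client.indices.delete_template(name="movies-template")
    await client.close()


asyncio.run(main())
```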
@@ -894,7 +1041,13 @@ async def segments(self, index=None, params=None, headers=None): "q", "rewrite", ) - async def validate_query(self, body=None, index=None, params=None, headers=None): + async def validate_query( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. @@ -943,7 +1096,12 @@ async def validate_query(self, body=None, index=None, params=None, headers=None) "query", "request", ) - async def clear_cache(self, index=None, params=None, headers=None): + async def clear_cache( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears all or specific caches for one or more indices. @@ -969,7 +1127,12 @@ async def clear_cache(self, index=None, params=None, headers=None): ) @query_params("active_only", "detailed") - async def recovery(self, index=None, params=None, headers=None): + async def recovery( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about ongoing index shard recoveries. @@ -992,7 +1155,12 @@ async def recovery(self, index=None, params=None, headers=None): "only_ancient_segments", "wait_for_completion", ) - async def upgrade(self, index=None, params=None, headers=None): + async def upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1017,7 +1185,12 @@ async def upgrade(self, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def get_upgrade(self, index=None, params=None, headers=None): + async def get_upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1040,7 +1213,12 @@ async def get_upgrade(self, index=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status" ) - async def shard_stores(self, index=None, params=None, headers=None): + async def shard_stores( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides store information for shard copies of indices. @@ -1070,7 +1248,12 @@ async def shard_stores(self, index=None, params=None, headers=None): "max_num_segments", "only_expunge_deletes", ) - async def forcemerge(self, index=None, params=None, headers=None): + async def forcemerge( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the force merge operation on one or more indices. @@ -1103,7 +1286,14 @@ async def forcemerge(self, index=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def shrink(self, index, target, body=None, params=None, headers=None): + async def shrink( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allow to shrink an existing index into a new index with fewer primary shards. 
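A sketch of `validate_query` and `forcemerge` as annotated above, assuming an existing `movies` index; `shrink` is only noted in a comment because it needs extra preconditions beyond what a short example can set up:

```python
# Query validation and a force merge; assumes a "movies" index exists.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    result = await client.indices.validate_query(
        index="movies", body={"query": {"match": {"title": "ring"}}}
    )
    print(result["valid"])
    await client.indices.forcemerge(index="movies", max_num_segments=1)
    # indices.shrink(index=..., target=...) additionally requires the
    # source index to be read-only with all shard copies on one node.
    await client.close()


asyncio.run(main())
```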
@@ -1142,7 +1332,14 @@ async def shrink(self, index, target, body=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def split(self, index, target, body=None, params=None, headers=None): + async def split( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows you to split an existing index into a new index with more primary shards. @@ -1183,8 +1380,13 @@ async def split(self, index, target, body=None, params=None, headers=None): "wait_for_active_shards", ) async def rollover( - self, alias, body=None, new_index=None, params=None, headers=None - ): + self, + alias: Any, + body: Any = None, + new_index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. @@ -1219,7 +1421,13 @@ async def rollover( ) @query_params() - async def create_data_stream(self, name, body=None, params=None, headers=None): + async def create_data_stream( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a data stream. @@ -1239,7 +1447,12 @@ async def create_data_stream(self, name, body=None, params=None, headers=None): ) @query_params() - async def delete_data_stream(self, name, params=None, headers=None): + async def delete_data_stream( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a data stream. @@ -1255,7 +1468,12 @@ async def delete_data_stream(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_index_template(self, name, params=None, headers=None): + async def delete_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. @@ -1279,7 +1497,12 @@ async def delete_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def exists_index_template(self, name, params=None, headers=None): + async def exists_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. @@ -1303,7 +1526,12 @@ async def exists_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - async def get_index_template(self, name=None, params=None, headers=None): + async def get_index_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. @@ -1324,7 +1552,13 @@ async def get_index_template(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - async def put_index_template(self, name, body, params=None, headers=None): + async def put_index_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. 
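The data-stream and composable-template methods above also work together: the template must declare `data_stream` before a matching stream can be created. A sketch with placeholder names:

```python
# Composable index template plus data stream; "logs-*" is illustrative.
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])
    await client.indices.put_index_template(
        name="logs-template",
        body={"index_patterns": ["logs-*"], "data_stream": {}},
    )
    await client.indices.create_data_stream(name="logs-app")
    print(await client.indices.get_data_stream(name="logs-app"))
    await client.indices.delete_data_stream(name="logs-app")
    await client.indices.delete_index_template(name="logs-template")
    await client.close()


asyncio.run(main())
```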
@@ -1354,7 +1588,13 @@ async def put_index_template(self, name, body, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - async def simulate_index_template(self, name, body=None, params=None, headers=None): + async def simulate_index_template( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Simulate matching the given index name against the index templates in the system. @@ -1387,7 +1627,12 @@ async def simulate_index_template(self, name, body=None, params=None, headers=No ) @query_params() - async def get_data_stream(self, name=None, params=None, headers=None): + async def get_data_stream( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns data streams. @@ -1400,7 +1645,13 @@ async def get_data_stream(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - async def simulate_template(self, body=None, name=None, params=None, headers=None): + async def simulate_template( + self, + body: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Simulate resolving the given template name or body. @@ -1428,7 +1679,12 @@ async def simulate_template(self, body=None, name=None, params=None, headers=Non ) @query_params("expand_wildcards") - async def resolve_index(self, name, params=None, headers=None): + async def resolve_index( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about any matching indices, aliases, and data streams. @@ -1454,7 +1710,13 @@ async def resolve_index(self, name, params=None, headers=None): "master_timeout", "timeout", ) - async def add_block(self, index, block, params=None, headers=None): + async def add_block( + self, + index: Any, + block: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds a block to an index. @@ -1486,7 +1748,12 @@ async def add_block(self, index, block, params=None, headers=None): ) @query_params() - async def data_streams_stats(self, name=None, params=None, headers=None): + async def data_streams_stats( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in a data stream. diff --git a/opensearchpy/_async/client/indices.pyi b/opensearchpy/_async/client/indices.pyi deleted file mode 100644 index 1a5c0912..00000000 --- a/opensearchpy/_async/client/indices.pyi +++ /dev/null @@ -1,1097 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IndicesClient(NamespacedClient): - async def analyze( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def refresh( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def flush( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - force: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - wait_if_ongoing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def create( - self, - index: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clone( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def open( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def close( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def put_mapping( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - write_index_only: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_mapping( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_field_mapping( - self, - fields: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_alias( - self, - index: Any, - name: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_alias( - self, - name: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- async def get_alias( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_aliases( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_alias( - self, - index: Any, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - order: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def exists_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_settings( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_settings( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - preserve_existing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def stats( - self, - *, - index: Optional[Any] = ..., - metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - forbid_closed_indices: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def segments( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def validate_query( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - all_shards: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - q: Optional[Any] = ..., - rewrite: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clear_cache( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - query: Optional[Any] = ..., - request: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - only_ancient_segments: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def shard_stores( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def forcemerge( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - max_num_segments: Optional[Any] = ..., - only_expunge_deletes: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def shrink( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def split( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def rollover( - self, - alias: Any, - *, - body: Optional[Any] = ..., - new_index: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_data_stream( - self, - name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_index_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_index_template( - self, - name: Any, - *, - body: Any, - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def simulate_index_template( - self, - name: Any, - *, - body: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_data_stream( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate_template( - self, - *, - body: Optional[Any] = ..., - name: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def resolve_index( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def add_block( - self, - index: Any, - block: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def data_streams_stats( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index 0d56f7e1..2f8cff27 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -36,12 +36,19 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") - async def get_pipeline(self, id=None, params=None, headers=None): + async def get_pipeline( + self, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a pipeline. @@ -59,7 +66,13 @@ async def get_pipeline(self, id=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def put_pipeline(self, id, body, params=None, headers=None): + async def put_pipeline( + self, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a pipeline. @@ -86,7 +99,12 @@ async def put_pipeline(self, id, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_pipeline(self, id, params=None, headers=None): + async def delete_pipeline( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a pipeline. @@ -110,7 +128,13 @@ async def delete_pipeline(self, id, params=None, headers=None): ) @query_params("verbose") - async def simulate(self, body, id=None, params=None, headers=None): + async def simulate( + self, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to simulate a pipeline with example documents. 
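`IngestClient` gets the same `Any`-typed signatures. A minimal sketch of the annotated `simulate` call, using an inline pipeline definition; the processor and field names are illustrative, not taken from this patch:

```python
from typing import Any

from opensearchpy import AsyncOpenSearch


async def simulate_pipeline(client: AsyncOpenSearch) -> Any:
    body = {
        "pipeline": {
            "description": "lowercase the title field",
            "processors": [{"lowercase": {"field": "title"}}],
        },
        "docs": [{"_source": {"title": "OpenSearch"}}],
    }
    # `verbose` is forwarded as a query parameter by @query_params("verbose").
    return await client.ingest.simulate(body=body, verbose=True)
```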
@@ -132,7 +156,11 @@ async def simulate(self, body, id=None, params=None, headers=None): ) @query_params() - async def processor_grok(self, params=None, headers=None): + async def processor_grok( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of the built-in patterns. diff --git a/opensearchpy/_async/client/ingest.pyi b/opensearchpy/_async/client/ingest.pyi deleted file mode 100644 index 9dd4fc2b..00000000 --- a/opensearchpy/_async/client/ingest.pyi +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IngestClient(NamespacedClient): - async def get_pipeline( - self, - *, - id: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_pipeline( - self, - id: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_pipeline( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate( - self, - *, - body: Any, - id: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def processor_grok( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index a89fee94..36146fad 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -36,14 +36,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params("timeout") async def reload_secure_settings( - self, body=None, node_id=None, params=None, headers=None - ): + self, + body: Any = None, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reloads secure settings. 
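`NodesClient` follows the same pattern: `reload_secure_settings` keeps its optional `body` and `node_id`, now typed as `Any`. A small sketch, assuming no password-protected keystore and a response in the usual nodes-summary shape:

```python
from typing import Any

from opensearchpy import AsyncOpenSearch


async def reload_keystore(client: AsyncOpenSearch) -> Any:
    # Omitting `node_id` reloads secure settings on every node.
    result = await client.nodes.reload_secure_settings()
    print(result["_nodes"]["successful"])  # count of nodes that reloaded
    return result
```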
@@ -64,7 +70,13 @@ async def reload_secure_settings( ) @query_params("flat_settings", "timeout") - async def info(self, node_id=None, metric=None, params=None, headers=None): + async def info( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about nodes in the cluster. @@ -95,8 +107,13 @@ async def info(self, node_id=None, metric=None, params=None, headers=None): "types", ) async def stats( - self, node_id=None, metric=None, index_metric=None, params=None, headers=None - ): + self, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns statistical information about nodes in the cluster. @@ -140,7 +157,12 @@ async def stats( @query_params( "doc_type", "ignore_idle_threads", "interval", "snapshots", "threads", "timeout" ) - async def hot_threads(self, node_id=None, params=None, headers=None): + async def hot_threads( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about hot threads on each node in the cluster. @@ -173,7 +195,13 @@ async def hot_threads(self, node_id=None, params=None, headers=None): ) @query_params("timeout") - async def usage(self, node_id=None, metric=None, params=None, headers=None): + async def usage( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns low-level information about REST actions usage on nodes. diff --git a/opensearchpy/_async/client/nodes.pyi b/opensearchpy/_async/client/nodes.pyi deleted file mode 100644 index c18afb83..00000000 --- a/opensearchpy/_async/client/nodes.pyi +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class NodesClient(NamespacedClient): - async def reload_secure_settings( - self, - *, - body: Optional[Any] = ..., - node_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def info( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def stats( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - index_metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - level: Optional[Any] = ..., - timeout: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def hot_threads( - self, - *, - node_id: Optional[Any] = ..., - doc_type: Optional[Any] = ..., - ignore_idle_threads: Optional[Any] = ..., - interval: Optional[Any] = ..., - snapshots: Optional[Any] = ..., - threads: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def usage( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/plugins.py b/opensearchpy/_async/client/plugins.py index 19570be4..b12214d7 100644 --- a/opensearchpy/_async/client/plugins.py +++ b/opensearchpy/_async/client/plugins.py @@ -9,14 +9,19 @@ # GitHub history for details. import warnings +from typing import Any from ..plugins.alerting import AlertingClient from ..plugins.index_management import IndexManagementClient +from .client import Client from .utils import NamespacedClient class PluginsClient(NamespacedClient): - def __init__(self, client): + alerting: Any + index_management: Any + + def __init__(self, client: Client) -> None: super(PluginsClient, self).__init__(client) # self.query_workbench = QueryWorkbenchClient(client) # self.reporting = ReportingClient(client) @@ -28,7 +33,7 @@ def __init__(self, client): self._dynamic_lookup(client) - def _dynamic_lookup(self, client): + def _dynamic_lookup(self, client: Any) -> None: # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742 plugins = [ diff --git a/opensearchpy/_async/client/plugins.pyi b/opensearchpy/_async/client/plugins.pyi deleted file mode 100644 index 44576c74..00000000 --- a/opensearchpy/_async/client/plugins.pyi +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any - -from ..client import AsyncOpenSearch -from ..plugins.alerting import AlertingClient as AlertingClient -from .utils import NamespacedClient as NamespacedClient - -class PluginsClient(NamespacedClient): - alerting: Any - index_management: Any - def __init__(self, client: AsyncOpenSearch) -> None: ... 
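The hunks above all follow one pattern, repeated for every namespaced client in this patch: the handwritten `.pyi` stub is deleted and its types are folded into the `.py` implementation as inline annotations, with `Any` standing in for the loosely typed request and response values. As a hedged illustration (not part of the patch itself), here is a minimal sketch of a call site against the annotated async nodes API; the host, credentials, and printed fields are placeholder assumptions for a local test cluster:

```python
# Minimal sketch: exercising the annotated async nodes client.
# Assumes a local cluster at https://admin:admin@localhost:9200
# and opensearch-py installed with async support (aiohttp).
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"],
        use_ssl=True,
        verify_certs=False,
    )
    try:
        # info(node_id=None, metric=None, ...) -> Any (a JSON-decoded dict)
        info = await client.nodes.info()
        # keyword arguments listed in @query_params become query parameters
        stats = await client.nodes.stats(metric="os")
        print(info["cluster_name"], list(stats["nodes"]))
    finally:
        await client.close()


asyncio.run(main())
```

Because the return type is `Any` rather than a typed response object, type checkers accept the dictionary indexing above without narrowing; the annotations document the call signatures, not the response shapes.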
diff --git a/opensearchpy/_async/client/remote.py b/opensearchpy/_async/client/remote.py index eee7319d..433c9fa5 100644 --- a/opensearchpy/_async/client/remote.py +++ b/opensearchpy/_async/client/remote.py @@ -26,12 +26,14 @@ # under the License. +from typing import Any + from .utils import NamespacedClient, query_params class RemoteClient(NamespacedClient): @query_params() - async def info(self, params=None, headers=None): + async def info(self, params: Any = None, headers: Any = None) -> Any: return await self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) diff --git a/opensearchpy/_async/client/remote.pyi b/opensearchpy/_async/client/remote.pyi deleted file mode 100644 index a2d7dc51..00000000 --- a/opensearchpy/_async/client/remote.pyi +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteClient(NamespacedClient): - async def info( - self, - *, - timeout: Optional[Any] = None, - pretty: Optional[bool] = None, - human: Optional[bool] = None, - error_trace: Optional[bool] = None, - format: Optional[str] = None, - filter_path: Optional[Union[str, Collection[str]]] = None, - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, - ) -> Any: ... diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index e59d1870..8a72f41c 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -7,6 +7,7 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. + # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # @@ -17,12 +18,19 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, query_params class RemoteStoreClient(NamespacedClient): @query_params("cluster_manager_timeout", "wait_for_completion") - async def restore(self, body, params=None, headers=None): + async def restore( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Restores from remote store. 
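`RemoteStoreClient.restore` keeps `body` as a required positional argument, while `cluster_manager_timeout` and `wait_for_completion` remain keyword arguments handled by the `@query_params` decorator. A hedged fragment, reusing an `AsyncOpenSearch` instance like the one sketched above; `my-index` is a placeholder and the index is assumed to be backed by a remote store:

```python
# Hedged fragment: restore an index from the remote store.
# Run inside an async context; `client` is an AsyncOpenSearch instance.
response = await client.remote_store.restore(
    body={"indices": ["my-index"]},
    wait_for_completion=True,  # lifted into the query string by @query_params
)
print(response)
```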
diff --git a/opensearchpy/_async/client/remote_store.pyi b/opensearchpy/_async/client/remote_store.pyi deleted file mode 100644 index b14866ef..00000000 --- a/opensearchpy/_async/client/remote_store.pyi +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteStoreClient(NamespacedClient): - async def restore( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index 43265506..dc893f86 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -8,7 +8,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. - # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # @@ -19,14 +18,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): - from ._patch import health_check, update_audit_config + from ._patch import health_check, update_audit_config # type: ignore @query_params() - async def get_account_details(self, params=None, headers=None): + async def get_account_details( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns account details for the current user. @@ -36,7 +41,12 @@ async def get_account_details(self, params=None, headers=None): ) @query_params() - async def change_password(self, body, params=None, headers=None): + async def change_password( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the password for the current user. 
@@ -54,7 +64,12 @@ async def change_password(self, body, params=None, headers=None): ) @query_params() - async def get_action_group(self, action_group, params=None, headers=None): + async def get_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one action group. @@ -74,7 +89,11 @@ async def get_action_group(self, action_group, params=None, headers=None): ) @query_params() - async def get_action_groups(self, params=None, headers=None): + async def get_action_groups( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all action groups. @@ -87,7 +106,12 @@ async def get_action_groups(self, params=None, headers=None): ) @query_params() - async def delete_action_group(self, action_group, params=None, headers=None): + async def delete_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete a specified action group. @@ -107,7 +131,13 @@ async def delete_action_group(self, action_group, params=None, headers=None): ) @query_params() - async def create_action_group(self, action_group, body, params=None, headers=None): + async def create_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified action group. @@ -128,7 +158,13 @@ async def create_action_group(self, action_group, body, params=None, headers=Non ) @query_params() - async def patch_action_group(self, action_group, body, params=None, headers=None): + async def patch_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an action group. @@ -147,7 +183,12 @@ async def patch_action_group(self, action_group, body, params=None, headers=None ) @query_params() - async def patch_action_groups(self, body, params=None, headers=None): + async def patch_action_groups( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. @@ -165,7 +206,12 @@ async def patch_action_groups(self, body, params=None, headers=None): ) @query_params() - async def get_user(self, username, params=None, headers=None): + async def get_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieve one internal user. @@ -182,7 +228,11 @@ async def get_user(self, username, params=None, headers=None): ) @query_params() - async def get_users(self, params=None, headers=None): + async def get_users( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieve all internal users. @@ -195,7 +245,12 @@ async def get_users(self, params=None, headers=None): ) @query_params() - async def delete_user(self, username, params=None, headers=None): + async def delete_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete the specified user. @@ -212,7 +267,13 @@ async def delete_user(self, username, params=None, headers=None): ) @query_params() - async def create_user(self, username, body, params=None, headers=None): + async def create_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified user. 
@@ -231,7 +292,13 @@ async def create_user(self, username, body, params=None, headers=None): ) @query_params() - async def patch_user(self, username, body, params=None, headers=None): + async def patch_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an internal user. @@ -250,7 +317,12 @@ async def patch_user(self, username, body, params=None, headers=None): ) @query_params() - async def patch_users(self, body, params=None, headers=None): + async def patch_users( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. @@ -268,7 +340,12 @@ async def patch_users(self, body, params=None, headers=None): ) @query_params() - async def get_role(self, role, params=None, headers=None): + async def get_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role. @@ -285,7 +362,11 @@ async def get_role(self, role, params=None, headers=None): ) @query_params() - async def get_roles(self, params=None, headers=None): + async def get_roles( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all roles. @@ -295,7 +376,12 @@ async def get_roles(self, params=None, headers=None): ) @query_params() - async def delete_role(self, role, params=None, headers=None): + async def delete_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete the specified role. @@ -312,7 +398,13 @@ async def delete_role(self, role, params=None, headers=None): ) @query_params() - async def create_role(self, role, body, params=None, headers=None): + async def create_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role. @@ -331,7 +423,13 @@ async def create_role(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role(self, role, body, params=None, headers=None): + async def patch_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role. @@ -350,7 +448,12 @@ async def patch_role(self, role, body, params=None, headers=None): ) @query_params() - async def patch_roles(self, body, params=None, headers=None): + async def patch_roles( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. @@ -368,7 +471,12 @@ async def patch_roles(self, body, params=None, headers=None): ) @query_params() - async def get_role_mapping(self, role, params=None, headers=None): + async def get_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role mapping. @@ -385,7 +493,11 @@ async def get_role_mapping(self, role, params=None, headers=None): ) @query_params() - async def get_role_mappings(self, params=None, headers=None): + async def get_role_mappings( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all role mappings. @@ -398,7 +510,12 @@ async def get_role_mappings(self, params=None, headers=None): ) @query_params() - async def delete_role_mapping(self, role, params=None, headers=None): + async def delete_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes the specified role mapping. 
@@ -415,7 +532,13 @@ async def delete_role_mapping(self, role, params=None, headers=None): ) @query_params() - async def create_role_mapping(self, role, body, params=None, headers=None): + async def create_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role mapping. @@ -434,7 +557,13 @@ async def create_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role_mapping(self, role, body, params=None, headers=None): + async def patch_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role mapping. @@ -453,7 +582,12 @@ async def patch_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role_mappings(self, body, params=None, headers=None): + async def patch_role_mappings( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates multiple role mappings in a single call. @@ -471,7 +605,12 @@ async def patch_role_mappings(self, body, params=None, headers=None): ) @query_params() - async def get_tenant(self, tenant, params=None, headers=None): + async def get_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one tenant. @@ -488,7 +627,11 @@ async def get_tenant(self, tenant, params=None, headers=None): ) @query_params() - async def get_tenants(self, params=None, headers=None): + async def get_tenants( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all tenants. @@ -498,7 +641,12 @@ async def get_tenants(self, params=None, headers=None): ) @query_params() - async def delete_tenant(self, tenant, params=None, headers=None): + async def delete_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete the specified tenant. @@ -515,7 +663,13 @@ async def delete_tenant(self, tenant, params=None, headers=None): ) @query_params() - async def create_tenant(self, tenant, body, params=None, headers=None): + async def create_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified tenant. @@ -534,7 +688,13 @@ async def create_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - async def patch_tenant(self, tenant, body, params=None, headers=None): + async def patch_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify a single tenant. @@ -553,7 +713,12 @@ async def patch_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - async def patch_tenants(self, body, params=None, headers=None): + async def patch_tenants( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify multiple tenants in a single call. @@ -571,7 +736,11 @@ async def patch_tenants(self, body, params=None, headers=None): ) @query_params() - async def get_configuration(self, params=None, headers=None): + async def get_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the current Security plugin configuration in JSON format. 
@@ -584,7 +753,12 @@ async def get_configuration(self, params=None, headers=None): ) @query_params() - async def update_configuration(self, body, params=None, headers=None): + async def update_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds or updates the existing configuration using the REST API. @@ -602,7 +776,12 @@ async def update_configuration(self, body, params=None, headers=None): ) @query_params() - async def patch_configuration(self, body, params=None, headers=None): + async def patch_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ A PATCH call is used to update the existing configuration using the REST API. @@ -621,8 +800,11 @@ async def patch_configuration(self, body, params=None, headers=None): @query_params() async def get_distinguished_names( - self, cluster_name=None, params=None, headers=None - ): + self, + cluster_name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all distinguished names in the allow list. @@ -637,8 +819,12 @@ async def get_distinguished_names( @query_params() async def update_distinguished_names( - self, cluster_name, body=None, params=None, headers=None - ): + self, + cluster_name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds or updates the specified distinguished names in the cluster’s or node’s allow list. @@ -659,7 +845,12 @@ async def update_distinguished_names( ) @query_params() - async def delete_distinguished_names(self, cluster_name, params=None, headers=None): + async def delete_distinguished_names( + self, + cluster_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes all distinguished names in the specified cluster’s or node’s allow list. @@ -679,7 +870,11 @@ async def delete_distinguished_names(self, cluster_name, params=None, headers=No ) @query_params() - async def get_certificates(self, params=None, headers=None): + async def get_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves the cluster’s security certificates. @@ -689,7 +884,11 @@ async def get_certificates(self, params=None, headers=None): ) @query_params() - async def reload_transport_certificates(self, params=None, headers=None): + async def reload_transport_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reload transport layer communication certificates. @@ -702,7 +901,11 @@ async def reload_transport_certificates(self, params=None, headers=None): ) @query_params() - async def reload_http_certificates(self, params=None, headers=None): + async def reload_http_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reload HTTP layer communication certificates. @@ -715,7 +918,11 @@ async def reload_http_certificates(self, params=None, headers=None): ) @query_params() - async def flush_cache(self, params=None, headers=None): + async def flush_cache( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. @@ -725,7 +932,11 @@ async def flush_cache(self, params=None, headers=None): ) @query_params() - async def health(self, params=None, headers=None): + async def health( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Checks to see if the Security plugin is up and running. 
@@ -735,7 +946,11 @@ async def health(self, params=None, headers=None): ) @query_params() - async def get_audit_configuration(self, params=None, headers=None): + async def get_audit_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves the audit configuration. @@ -745,7 +960,12 @@ async def get_audit_configuration(self, params=None, headers=None): ) @query_params() - async def update_audit_configuration(self, body, params=None, headers=None): + async def update_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the audit configuration. @@ -763,7 +983,12 @@ async def update_audit_configuration(self, body, params=None, headers=None): ) @query_params() - async def patch_audit_configuration(self, body, params=None, headers=None): + async def patch_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. @@ -781,7 +1006,12 @@ async def patch_audit_configuration(self, body, params=None, headers=None): ) @query_params() - async def patch_distinguished_names(self, body, params=None, headers=None): + async def patch_distinguished_names( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Bulk update of distinguished names. diff --git a/opensearchpy/_async/client/security.pyi b/opensearchpy/_async/client/security.pyi deleted file mode 100644 index b3010b3b..00000000 --- a/opensearchpy/_async/client/security.pyi +++ /dev/null @@ -1,821 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SecurityClient(NamespacedClient): - async def get_account_details( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def change_password( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_action_groups( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def patch_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_action_groups( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_users( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def create_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_users( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_roles( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_roles( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_role_mappings( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_role_mappings( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_tenants( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def patch_tenants( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_distinguished_names( - self, - *, - cluster_name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def update_distinguished_names( - self, - cluster_name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_distinguished_names( - self, - cluster_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reload_transport_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reload_http_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def flush_cache( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def health( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_audit_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def patch_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def patch_distinguished_names( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index f9960b64..97ffec72 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -36,12 +36,21 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - async def create(self, repository, snapshot, body=None, params=None, headers=None): + async def create( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a snapshot in a repository. @@ -70,7 +79,13 @@ async def create(self, repository, snapshot, body=None, params=None, headers=Non ) @query_params("cluster_manager_timeout", "master_timeout") - async def delete(self, repository, snapshot, params=None, headers=None): + async def delete( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a snapshot. @@ -97,7 +112,13 @@ async def delete(self, repository, snapshot, params=None, headers=None): @query_params( "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) - async def get(self, repository, snapshot, params=None, headers=None): + async def get( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a snapshot. @@ -127,7 +148,12 @@ async def get(self, repository, snapshot, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def delete_repository(self, repository, params=None, headers=None): + async def delete_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a repository. @@ -152,7 +178,12 @@ async def delete_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - async def get_repository(self, repository=None, params=None, headers=None): + async def get_repository( + self, + repository: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a repository. @@ -171,7 +202,13 @@ async def get_repository(self, repository=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") - async def create_repository(self, repository, body, params=None, headers=None): + async def create_repository( + self, + repository: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a repository. 
@@ -199,7 +236,14 @@ async def create_repository(self, repository, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - async def restore(self, repository, snapshot, body=None, params=None, headers=None): + async def restore( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Restores a snapshot. @@ -228,7 +272,13 @@ async def restore(self, repository, snapshot, body=None, params=None, headers=No ) @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") - async def status(self, repository=None, snapshot=None, params=None, headers=None): + async def status( + self, + repository: Any = None, + snapshot: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the status of a snapshot. @@ -252,7 +302,12 @@ async def status(self, repository=None, snapshot=None, params=None, headers=None ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def verify_repository(self, repository, params=None, headers=None): + async def verify_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Verifies a repository. @@ -276,7 +331,12 @@ async def verify_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - async def cleanup_repository(self, repository, params=None, headers=None): + async def cleanup_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes stale data from repository. @@ -301,8 +361,14 @@ async def cleanup_repository(self, repository, params=None, headers=None): @query_params("cluster_manager_timeout", "master_timeout") async def clone( - self, repository, snapshot, target_snapshot, body, params=None, headers=None - ): + self, + repository: Any, + snapshot: Any, + target_snapshot: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. diff --git a/opensearchpy/_async/client/snapshot.pyi b/opensearchpy/_async/client/snapshot.pyi deleted file mode 100644 index b219a323..00000000 --- a/opensearchpy/_async/client/snapshot.pyi +++ /dev/null @@ -1,272 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SnapshotClient(NamespacedClient): - async def create( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_repository( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_repository( - self, - repository: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - verify: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def restore( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def status( - self, - *, - repository: Optional[Any] = ..., - snapshot: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def verify_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cleanup_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clone( - self, - repository: Any, - snapshot: Any, - target_snapshot: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
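With the stub file deleted, the annotated `SnapshotClient` methods above carry their own signatures. A minimal usage sketch of the async client, assuming a reachable local cluster with the demo `admin:admin` credentials and a placeholder repository path that is registered in the cluster's `path.repo`; the method names and the `wait_for_completion` query param come straight from the diff above:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"],
        verify_certs=False,
    )

    # Register a shared-filesystem repository; "backups" and the location
    # are placeholder values for this sketch.
    await client.snapshot.create_repository(
        repository="backups",
        body={"type": "fs", "settings": {"location": "/tmp/backups"}},
    )

    # wait_for_completion is one of the query params declared on create().
    await client.snapshot.create(
        repository="backups",
        snapshot="snapshot-1",
        body={"indices": "logs"},  # "logs" is a placeholder index name
        wait_for_completion=True,
    )

    print(await client.snapshot.get(repository="backups", snapshot="snapshot-1"))
    await client.close()


asyncio.run(main())
```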
diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 7efce482..39aefe93 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -37,6 +37,7 @@ import warnings +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -51,7 +52,11 @@ class TasksClient(NamespacedClient): "timeout", "wait_for_completion", ) - async def list(self, params=None, headers=None): + async def list( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of tasks. @@ -77,7 +82,12 @@ async def list(self, params=None, headers=None): ) @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") - async def cancel(self, task_id=None, params=None, headers=None): + async def cancel( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Cancels a task, if it can be cancelled through an API. @@ -103,7 +113,12 @@ async def cancel(self, task_id=None, params=None, headers=None): ) @query_params("timeout", "wait_for_completion") - async def get(self, task_id=None, params=None, headers=None): + async def get( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a task. diff --git a/opensearchpy/_async/client/tasks.pyi b/opensearchpy/_async/client/tasks.pyi deleted file mode 100644 index f3cf05d0..00000000 --- a/opensearchpy/_async/client/tasks.pyi +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class TasksClient(NamespacedClient): - async def list( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - group_by: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cancel( - self, - *, - task_id: Optional[Any] = ..., - actions: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - *, - task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
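The same pattern applies to the now-annotated `TasksClient`. A short sketch under the same local-cluster assumption as above; `actions` and `detailed` are query params declared in the `@query_params` decorator on `list()` (they also appear in the deleted stub):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"],
        verify_certs=False,
    )

    # List currently running search tasks; both kwargs are declared
    # query params, not positional arguments.
    tasks = await client.tasks.list(actions="*search*", detailed=True)
    print(tasks)

    # get() and cancel() take an optional task_id, e.g.
    # await client.tasks.get(task_id="<node_id>:<task_number>")

    await client.close()


asyncio.run(main())
```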
diff --git a/opensearchpy/_async/client/utils.py b/opensearchpy/_async/client/utils.py index 59bedb8e..45ad552b 100644 --- a/opensearchpy/_async/client/utils.py +++ b/opensearchpy/_async/client/utils.py @@ -35,3 +35,13 @@ _normalize_hosts, query_params, ) + +__all__ = [ + "SKIP_IN_PATH", + "NamespacedClient", + "_make_path", + "query_params", + "_bulk_body", + "_escape", + "_normalize_hosts", +] diff --git a/opensearchpy/_async/client/utils.pyi b/opensearchpy/_async/client/utils.pyi deleted file mode 100644 index e175d5e2..00000000 --- a/opensearchpy/_async/client/utils.pyi +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from ...client.utils import SKIP_IN_PATH as SKIP_IN_PATH -from ...client.utils import _bulk_body as _bulk_body -from ...client.utils import _escape as _escape -from ...client.utils import _make_path as _make_path # noqa -from ...client.utils import _normalize_hosts as _normalize_hosts -from ...client.utils import query_params as query_params -from ..client import AsyncOpenSearch -from ..transport import AsyncTransport - -class NamespacedClient: - client: AsyncOpenSearch - def __init__(self, client: AsyncOpenSearch) -> None: ... - @property - def transport(self) -> AsyncTransport: ... diff --git a/opensearchpy/_async/compat.py b/opensearchpy/_async/compat.py index 66c2eca8..2ba1b980 100644 --- a/opensearchpy/_async/compat.py +++ b/opensearchpy/_async/compat.py @@ -39,7 +39,7 @@ from asyncio import get_running_loop except ImportError: - def get_running_loop(): + def get_running_loop() -> asyncio.AbstractEventLoop: loop = asyncio.get_event_loop() if not loop.is_running(): raise RuntimeError("no running event loop") diff --git a/opensearchpy/_async/compat.pyi b/opensearchpy/_async/compat.pyi deleted file mode 100644 index 290396de..00000000 --- a/opensearchpy/_async/compat.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. 
licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import asyncio - -def get_running_loop() -> asyncio.AbstractEventLoop: ... diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index 1f49220f..c85b2ac8 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -32,6 +32,18 @@ import asyncio import logging +from typing import ( + Any, + AsyncGenerator, + AsyncIterable, + Collection, + Iterable, + List, + Optional, + Tuple, + TypeVar, + Union, +) from ...compat import map from ...exceptions import TransportError @@ -43,10 +55,12 @@ ) from ...helpers.errors import ScanError -logger = logging.getLogger("opensearchpy.helpers") +logger: logging.Logger = logging.getLogger("opensearchpy.helpers") -async def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): +async def _chunk_actions( + actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Any +) -> AsyncGenerator[Any, None]: """ Split actions into chunks by number or size, serialize them into strings in the process. @@ -64,15 +78,15 @@ async def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): async def _process_bulk_chunk( - client, - bulk_actions, - bulk_data, - raise_on_exception=True, - raise_on_error=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + bulk_actions: Any, + bulk_data: Any, + raise_on_exception: bool = True, + raise_on_error: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> AsyncGenerator[Tuple[bool, Any], None]: """ Send a bulk request to opensearch and process the output. 
""" @@ -101,21 +115,26 @@ async def _process_bulk_chunk( yield item -def aiter(x): +T = TypeVar("T") + + +def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> Any: """Turns an async iterable or iterable into an async iterator""" if hasattr(x, "__anext__"): return x elif hasattr(x, "__aiter__"): return x.__aiter__() - async def f(): + async def f() -> Any: for item in x: yield item return f().__aiter__() -async def azip(*iterables): +async def azip( + *iterables: Union[Iterable[T], AsyncIterable[T]] +) -> AsyncGenerator[Tuple[T, ...], None]: """Zips async iterables and iterables into an async iterator with the same behavior as zip() """ @@ -128,21 +147,21 @@ async def azip(*iterables): async def async_streaming_bulk( - client, - actions, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - raise_on_error=True, - expand_action_callback=expand_action, - raise_on_exception=True, - max_retries=0, - initial_backoff=2, - max_backoff=600, - yield_ok=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + raise_on_error: bool = True, + expand_action_callback: Any = expand_action, + raise_on_exception: bool = True, + max_retries: int = 0, + initial_backoff: Union[float, int] = 2, + max_backoff: Union[float, int] = 600, + yield_ok: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> AsyncGenerator[Tuple[bool, Any], None]: """ Streaming bulk consumes actions from the iterable passed in and yields results per action. For non-streaming usecases use @@ -177,7 +196,7 @@ async def async_streaming_bulk( :arg ignore_status: list of HTTP status code that you want to ignore """ - async def map_actions(): + async def map_actions() -> Any: async for item in aiter(actions): yield expand_action_callback(item) @@ -185,7 +204,8 @@ async def map_actions(): map_actions(), chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): - to_retry, to_retry_data = [], [] + to_retry: Any = [] + to_retry_data: Any = [] if attempt: await asyncio.sleep( min(max_backoff, initial_backoff * 2 ** (attempt - 1)) @@ -237,8 +257,13 @@ async def map_actions(): async def async_bulk( - client, actions, stats_only=False, ignore_status=(), *args, **kwargs -): + client: Any, + actions: Union[Iterable[Any], AsyncIterable[Any]], + stats_only: bool = False, + ignore_status: Optional[Union[int, Collection[int]]] = (), + *args: Any, + **kwargs: Any +) -> Tuple[int, Union[int, List[Any]]]: """ Helper for the :meth:`~opensearchpy.AsyncOpenSearch.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and @@ -274,7 +299,7 @@ async def async_bulk( # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True - async for ok, item in async_streaming_bulk( + async for ok, item in async_streaming_bulk( # type: ignore client, actions, ignore_status=ignore_status, *args, **kwargs ): # go through request-response pairs and detect failures @@ -289,17 +314,17 @@ async def async_bulk( async def async_scan( - client, - query=None, - scroll="5m", - raise_on_error=True, - preserve_order=False, - size=1000, - request_timeout=None, - clear_scroll=True, - scroll_kwargs=None, - **kwargs -): + client: Any, + query: Any = None, + scroll: str = "5m", + raise_on_error: bool = True, + preserve_order: bool = False, + size: int = 1000, + request_timeout: Any = None, + clear_scroll: bool = True, + scroll_kwargs: Any = None, + **kwargs: Any +) -> 
Any: """ Simple abstraction on top of the :meth:`~opensearchpy.AsyncOpenSearch.scroll` api - a simple iterator that @@ -409,16 +434,16 @@ async def async_scan( async def async_reindex( - client, - source_index, - target_index, - query=None, - target_client=None, - chunk_size=500, - scroll="5m", - scan_kwargs={}, - bulk_kwargs={}, -): + client: Any, + source_index: Union[str, Collection[str]], + target_index: str, + query: Any = None, + target_client: Any = None, + chunk_size: int = 500, + scroll: str = "5m", + scan_kwargs: Any = {}, + bulk_kwargs: Any = {}, +) -> Tuple[int, Union[int, List[Any]]]: """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. @@ -454,7 +479,7 @@ async def async_reindex( client, query=query, index=source_index, scroll=scroll, **scan_kwargs ) - async def _change_doc_index(hits, index): + async def _change_doc_index(hits: Any, index: Any) -> Any: async for h in hits: h["_index"] = index if "fields" in h: diff --git a/opensearchpy/_async/helpers/actions.pyi b/opensearchpy/_async/helpers/actions.pyi deleted file mode 100644 index 20cc0661..00000000 --- a/opensearchpy/_async/helpers/actions.pyi +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import ( - Any, - AsyncGenerator, - AsyncIterable, - Callable, - Collection, - Dict, - Iterable, - List, - Mapping, - Optional, - Tuple, - TypeVar, - Union, -) - -from ...serializer import Serializer -from ..client import AsyncOpenSearch - -logger: logging.Logger - -T = TypeVar("T") - -def _chunk_actions( - actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer -) -> AsyncGenerator[Any, None]: ... -def _process_bulk_chunk( - client: AsyncOpenSearch, - bulk_actions: Any, - bulk_data: Any, - raise_on_exception: bool = ..., - raise_on_error: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> AsyncGenerator[Tuple[bool, Any], None]: ... -def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> AsyncGenerator[T, None]: ... -def azip( - *iterables: Union[Iterable[T], AsyncIterable[T]] -) -> AsyncGenerator[Tuple[T, ...], None]: ... 
-def async_streaming_bulk( - client: AsyncOpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - chunk_size: int = ..., - max_chunk_bytes: int = ..., - raise_on_error: bool = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - raise_on_exception: bool = ..., - max_retries: int = ..., - initial_backoff: Union[float, int] = ..., - max_backoff: Union[float, int] = ..., - yield_ok: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> AsyncGenerator[Tuple[bool, Any], None]: ... -async def async_bulk( - client: AsyncOpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - stats_only: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, Union[int, List[Any]]]: ... -def async_scan( - client: AsyncOpenSearch, - query: Optional[Any] = ..., - scroll: str = ..., - raise_on_error: bool = ..., - preserve_order: bool = ..., - size: int = ..., - request_timeout: Optional[Union[float, int]] = ..., - clear_scroll: bool = ..., - scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any -) -> AsyncGenerator[dict[str, Any], None]: ... -async def async_reindex( - client: AsyncOpenSearch, - source_index: Union[str, Collection[str]], - target_index: str, - query: Any = ..., - target_client: Optional[AsyncOpenSearch] = ..., - chunk_size: int = ..., - scroll: str = ..., - scan_kwargs: Optional[Mapping[str, Any]] = ..., - bulk_kwargs: Optional[Mapping[str, Any]] = ..., -) -> Tuple[int, Union[int, List[Any]]]: ... diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index e71bef46..25196e01 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -8,15 +8,13 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from fnmatch import fnmatch +from typing import Any, Optional, Sequence, Tuple, Type from six import add_metaclass +from opensearchpy._async.client import AsyncOpenSearch from opensearchpy._async.helpers.index import AsyncIndex from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy.connection.async_connections import get_connection @@ -35,7 +33,12 @@ class AsyncIndexMeta(DocumentMeta): # class, only user defined subclasses should have an _index attr _document_initialized = False - def __new__(cls, name, bases, attrs): + def __new__( + cls, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: new_cls = super(AsyncIndexMeta, cls).__new__(cls, name, bases, attrs) if cls._document_initialized: index_opts = attrs.pop("Index", None) @@ -46,7 +49,7 @@ def __new__(cls, name, bases, attrs): return new_cls @classmethod - def construct_index(cls, opts, bases): + def construct_index(cls, opts: Any, bases: Any) -> Any: if opts is None: for b in bases: if hasattr(b, "_index"): @@ -72,25 +75,27 @@ class AsyncDocument(ObjectBase): """ @classmethod - def _matches(cls, hit): + def _matches(cls: Any, hit: Any) -> bool: if cls._index._name is None: return True return fnmatch(hit.get("_index", ""), cls._index._name) @classmethod - def _get_using(cls, using=None): + def _get_using(cls: Any, using: Any = None) -> Any: return using or cls._index._using @classmethod - async def _get_connection(cls, using=None): + async def _get_connection(cls, using: Optional[AsyncOpenSearch] = None) -> Any: return await get_connection(cls._get_using(using)) @classmethod - def _default_index(cls, index=None): + def _default_index(cls: Any, index: Any = None) -> Any: return index or cls._index._name @classmethod - async def init(cls, index=None, using=None): + async def init( + cls: Any, index: Optional[str] = None, using: Optional[AsyncOpenSearch] = None + ) -> None: """ Create the index and populate the mappings in opensearch. """ @@ -99,7 +104,9 @@ async def init(cls, index=None, using=None): i = i.clone(name=index) await i.save(using=using) - def _get_index(self, index=None, required=True): + def _get_index( + self, index: Optional[str] = None, required: Optional[bool] = True + ) -> Any: if index is None: index = getattr(self.meta, "index", None) if index is None: @@ -110,7 +117,7 @@ def _get_index(self, index=None, required=True): raise ValidationException("You cannot write to a wildcard index.") return index - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( @@ -121,7 +128,9 @@ def __repr__(self): ) @classmethod - def search(cls, using=None, index=None): + def search( + cls, using: Optional[AsyncOpenSearch] = None, index: Optional[str] = None + ) -> AsyncSearch: """ Create an :class:`~opensearchpy.AsyncSearch` instance that will search over this ``Document``. @@ -131,7 +140,13 @@ def search(cls, using=None, index=None): ) @classmethod - async def get(cls, id, using=None, index=None, **kwargs): + async def get( # type: ignore + cls, + id: str, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any, + ) -> Any: """ Retrieve a single document from opensearch using its ``id``. 
@@ -150,7 +165,13 @@ async def get(cls, id, using=None, index=None, **kwargs): return cls.from_opensearch(doc) @classmethod - async def exists(cls, id, using=None, index=None, **kwargs): + async def exists( + cls, + id: str, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any, + ) -> Any: """ check if exists a single document from opensearch using its ``id``. @@ -167,13 +188,19 @@ async def exists(cls, id, using=None, index=None, **kwargs): @classmethod async def mget( - cls, docs, using=None, index=None, raise_on_error=True, missing="none", **kwargs - ): - r""" - Retrieve multiple document by their ``id``\s. Returns a list of instances + cls, + docs: Sequence[str], + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + raise_on_error: Optional[bool] = True, + missing: Optional[str] = "none", + **kwargs: Any, + ) -> Any: + """ + Retrieve multiple document by their ``id``'s. Returns a list of instances in the same order as requested. - :arg docs: list of ``id``\s of the documents to be retrieved or a list + :arg docs: list of ``id``'s of the documents to be retrieved or a list of document specifications as per https://opensearch.org/docs/latest/opensearch/rest-api/document-apis/multi-get/ :arg index: opensearch index to use, if the ``Document`` is @@ -197,7 +224,9 @@ async def mget( } results = await opensearch.mget(body, index=cls._default_index(index), **kwargs) - objs, error_docs, missing_docs = [], [], [] + objs: Any = [] + error_docs: Any = [] + missing_docs: Any = [] for doc in results["docs"]: if doc.get("found"): if error_docs or missing_docs: @@ -230,7 +259,12 @@ async def mget( raise NotFoundError(404, message, {"docs": missing_docs}) return objs - async def delete(self, using=None, index=None, **kwargs): + async def delete( + self, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any, + ) -> Any: """ Delete the instance in opensearch. @@ -253,7 +287,9 @@ async def delete(self, using=None, index=None, **kwargs): doc_meta.update(kwargs) await opensearch.delete(index=self._get_index(index), **doc_meta) - def to_dict(self, include_meta=False, skip_empty=True): + def to_dict( # type: ignore + self, include_meta: Optional[bool] = False, skip_empty: Optional[bool] = True + ) -> Any: """ Serialize the instance into a dictionary so that it can be saved in opensearch. @@ -264,7 +300,7 @@ def to_dict(self, include_meta=False, skip_empty=True): ``[]``, ``{}``) to be left on the document. Those values will be stripped out otherwise as they make no difference in opensearch. 
""" - d = super(AsyncDocument, self).to_dict(skip_empty=skip_empty) + d = super(AsyncDocument, self).to_dict(skip_empty) if not include_meta: return d @@ -280,19 +316,19 @@ def to_dict(self, include_meta=False, skip_empty=True): async def update( self, - using=None, - index=None, - detect_noop=True, - doc_as_upsert=False, - refresh=False, - retry_on_conflict=None, - script=None, - script_id=None, - scripted_upsert=False, - upsert=None, - return_doc_meta=False, - **fields - ): + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + detect_noop: Optional[bool] = True, + doc_as_upsert: Optional[bool] = False, + refresh: Optional[bool] = False, + retry_on_conflict: Optional[bool] = None, + script: Any = None, + script_id: Optional[str] = None, + scripted_upsert: Optional[bool] = False, + upsert: Optional[bool] = None, + return_doc_meta: Optional[bool] = False, + **fields: Any, + ) -> Any: """ Partial update of the document, specify fields you wish to update and both the instance and the document in opensearch will be updated:: @@ -321,7 +357,7 @@ async def update( :return operation result noop/updated """ - body = { + body: Any = { "doc_as_upsert": doc_as_upsert, "detect_noop": detect_noop, } @@ -385,13 +421,13 @@ async def update( async def save( self, - using=None, - index=None, - validate=True, - skip_empty=True, - return_doc_meta=False, - **kwargs - ): + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + validate: Optional[bool] = True, + skip_empty: Optional[bool] = True, + return_doc_meta: Optional[bool] = False, + **kwargs: Any, + ) -> Any: """ Save the document into opensearch. If the document doesn't exist it is created, it is overwritten otherwise. Returns ``True`` if this @@ -428,7 +464,7 @@ async def save( meta = await opensearch.index( index=self._get_index(index), body=self.to_dict(skip_empty=skip_empty), - **doc_meta + **doc_meta, ) # update meta information from OpenSearch for k in META_FIELDS: diff --git a/opensearchpy/_async/helpers/document.pyi b/opensearchpy/_async/helpers/document.pyi deleted file mode 100644 index f39d5471..00000000 --- a/opensearchpy/_async/helpers/document.pyi +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.document import DocumentMeta -from opensearchpy.helpers.utils import ObjectBase - -class AsyncIndexMeta(DocumentMeta): ... -class AsyncDocument(ObjectBase): ... diff --git a/opensearchpy/_async/helpers/faceted_search.py b/opensearchpy/_async/helpers/faceted_search.py index 86f22e00..1eb5a677 100644 --- a/opensearchpy/_async/helpers/faceted_search.py +++ b/opensearchpy/_async/helpers/faceted_search.py @@ -9,6 +9,8 @@ # GitHub history for details. 
+from typing import Any + from six import iteritems, itervalues from opensearchpy._async.helpers.search import AsyncSearch @@ -58,38 +60,38 @@ def search(self): """ - index = None - doc_types = None - fields = None - facets = {} - using = "default" + index: Any = None + doc_types: Any = None + fields: Any = None + facets: Any = {} + using: str = "default" - def __init__(self, query=None, filters={}, sort=()): + def __init__(self, query: Any = None, filters: Any = {}, sort: Any = ()) -> None: """ :arg query: the text to search for :arg filters: facet values to filter :arg sort: sort information to be passed to :class:`~opensearchpy.AsyncSearch` """ self._query = query - self._filters = {} + self._filters: Any = {} self._sort = sort - self.filter_values = {} + self.filter_values: Any = {} for name, value in iteritems(filters): self.add_filter(name, value) self._s = self.build_search() - async def count(self): + async def count(self) -> Any: return await self._s.count() - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: self._s = self._s[k] return self - def __iter__(self): + def __iter__(self) -> Any: return iter(self._s) - def add_filter(self, name, filter_values): + def add_filter(self, name: Any, filter_values: Any) -> None: """ Add a filter for a facet. """ @@ -111,7 +113,7 @@ def add_filter(self, name, filter_values): self._filters[name] = f - def search(self): + def search(self) -> Any: """ Returns the base Search object to which the facets are added. @@ -121,7 +123,7 @@ def search(self): s = AsyncSearch(doc_type=self.doc_types, index=self.index, using=self.using) return s.response_class(FacetedResponse) - def query(self, search, query): + def query(self, search: Any, query: Any) -> Any: """ Add query part to ``search``. @@ -134,7 +136,7 @@ def query(self, search, query): return search.query("multi_match", query=query) return search - def aggregate(self, search): + def aggregate(self, search: Any) -> Any: """ Add aggregations representing the facets selected, including potential filters. @@ -150,7 +152,7 @@ def aggregate(self, search): f, agg ) - def filter(self, search): + def filter(self, search: Any) -> Any: """ Add a ``post_filter`` to the search request narrowing the results based on the facet filters. @@ -163,7 +165,7 @@ def filter(self, search): post_filter &= f return search.post_filter(post_filter) - def highlight(self, search): + def highlight(self, search: Any) -> Any: """ Add highlighting for all the fields """ @@ -171,7 +173,7 @@ def highlight(self, search): *(f if "^" not in f else f.split("^", 1)[0] for f in self.fields) ) - def sort(self, search): + def sort(self, search: Any) -> Any: """ Add sorting information to the request. """ @@ -179,7 +181,7 @@ def sort(self, search): search = search.sort(*self._sort) return search - def build_search(self): + def build_search(self) -> Any: """ Construct the ``AsyncSearch`` object. """ @@ -192,7 +194,7 @@ def build_search(self): self.aggregate(s) return s - async def execute(self): + async def execute(self) -> Any: """ Execute the search and return the response. 
""" diff --git a/opensearchpy/_async/helpers/faceted_search.pyi b/opensearchpy/_async/helpers/faceted_search.pyi deleted file mode 100644 index 0e79f1f6..00000000 --- a/opensearchpy/_async/helpers/faceted_search.pyi +++ /dev/null @@ -1,11 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncFacetedSearch(object): ... diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index 51082dc6..ea06f316 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from opensearchpy._async.helpers.mapping import AsyncMapping from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery @@ -18,7 +20,14 @@ class AsyncIndexTemplate(object): - def __init__(self, name, template, index=None, order=None, **kwargs): + def __init__( + self, + name: Any, + template: Any, + index: Any = None, + order: Any = None, + **kwargs: Any + ) -> None: if index is None: self._index = AsyncIndex(template, **kwargs) else: @@ -32,17 +41,17 @@ def __init__(self, name, template, index=None, order=None, **kwargs): self._template_name = name self.order = order - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._index, attr_name) - def to_dict(self): + def to_dict(self) -> Any: d = self._index.to_dict() d["index_patterns"] = [self._index._name] if self.order is not None: d["order"] = self.order return d - async def save(self, using=None): + async def save(self, using: Any = None) -> Any: opensearch = await get_connection(using or self._index._using) return await opensearch.indices.put_template( name=self._template_name, body=self.to_dict() @@ -50,25 +59,27 @@ async def save(self, using=None): class AsyncIndex(object): - def __init__(self, name, using="default"): + def __init__(self, name: Any, using: str = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` """ self._name = name - self._doc_types = [] + self._doc_types: Any = [] self._using = using - self._settings = {} - self._aliases = {} - self._analysis = {} - self._mapping = None + self._settings: Any = {} + self._aliases: Any = {} + self._analysis: Any = {} + self._mapping: Any = None - def get_or_create_mapping(self): + def get_or_create_mapping(self) -> Any: if self._mapping is None: self._mapping = AsyncMapping() return self._mapping - def as_template(self, template_name, pattern=None, order=None): + def as_template( + self, template_name: Any, pattern: Any = None, order: Any = None + ) -> Any: # TODO: should we allow pattern to be a top-level arg? # or maybe have an IndexPattern that allows for it and have # AsyncDocument._index be that? 
@@ -76,7 +87,7 @@ def as_template(self, template_name, pattern=None, order=None): template_name, pattern or self._name, index=self, order=order ) - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: for doc in self._doc_types: nested, field = doc._doc_type.mapping.resolve_nested(field_path) if field is not None: @@ -85,7 +96,7 @@ def resolve_nested(self, field_path): return self._mapping.resolve_nested(field_path) return (), None - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: for doc in self._doc_types: field = doc._doc_type.mapping.resolve_field(field_path) if field is not None: @@ -94,12 +105,12 @@ def resolve_field(self, field_path): return self._mapping.resolve_field(field_path) return None - async def load_mappings(self, using=None): + async def load_mappings(self, using: Any = None) -> None: await self.get_or_create_mapping().update_from_opensearch( self._name, using=using or self._using ) - def clone(self, name=None, using=None): + def clone(self, name: Any = None, using: Any = None) -> Any: """ Create a copy of the instance with another name or connection alias. Useful for creating multiple indices with shared configuration:: @@ -123,14 +134,14 @@ def clone(self, name=None, using=None): i._mapping = self._mapping._clone() return i - async def _get_connection(self, using=None): + async def _get_connection(self, using: Any = None) -> Any: if self._name is None: raise ValueError("You cannot perform API calls on the default index.") return await get_connection(using or self._using) connection = property(_get_connection) - def mapping(self, mapping): + def mapping(self, mapping: Any) -> None: """ Associate a mapping (an instance of :class:`~opensearchpy.AsyncMapping`) with this index. @@ -139,7 +150,7 @@ def mapping(self, mapping): """ self.get_or_create_mapping().update(mapping) - def document(self, document): + def document(self, document: Any) -> Any: """ Associate a :class:`~opensearchpy.AsyncDocument` subclass with an index. This means that, when this index is created, it will contain the @@ -170,7 +181,7 @@ class Post(AsyncDocument): return document - def settings(self, **kwargs): + def settings(self, **kwargs: Any) -> "AsyncIndex": """ Add settings to the index:: @@ -183,7 +194,7 @@ def settings(self, **kwargs): self._settings.update(kwargs) return self - def aliases(self, **kwargs): + def aliases(self, **kwargs: Any) -> "AsyncIndex": """ Add aliases to the index definition:: @@ -193,7 +204,7 @@ def aliases(self, **kwargs): self._aliases.update(kwargs) return self - def analyzer(self, *args, **kwargs): + def analyzer(self, *args: Any, **kwargs: Any) -> Any: """ Explicitly add an analyzer to an index. Note that all custom analyzers defined in mappings will also be created. This is useful for search analyzers. 
@@ -220,14 +231,14 @@ def analyzer(self, *args, **kwargs): # merge the definition merge(self._analysis, d, True) - def to_dict(self): + def to_dict(self) -> Any: out = {} if self._settings: out["settings"] = self._settings if self._aliases: out["aliases"] = self._aliases - mappings = self._mapping.to_dict() if self._mapping else {} - analysis = self._mapping._collect_analysis() if self._mapping else {} + mappings: Any = self._mapping.to_dict() if self._mapping else {} + analysis: Any = self._mapping._collect_analysis() if self._mapping else {} for d in self._doc_types: mapping = d._doc_type.mapping merge(mappings, mapping.to_dict(), True) @@ -239,7 +250,7 @@ def to_dict(self): out.setdefault("settings", {})["analysis"] = analysis return out - def search(self, using=None): + def search(self, using: Any = None) -> Any: """ Return a :class:`~opensearchpy.AsyncSearch` object searching over the index (or all the indices belonging to this template) and its @@ -249,7 +260,7 @@ def search(self, using=None): using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using=None): + def updateByQuery(self, using: Any = None) -> Any: """ Return a :class:`~opensearchpy.AsyncUpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match @@ -263,7 +274,7 @@ def updateByQuery(self, using=None): index=self._name, ) - async def create(self, using=None, **kwargs): + async def create(self, using: Any = None, **kwargs: Any) -> Any: """ Creates the index in opensearch. @@ -274,13 +285,13 @@ async def create(self, using=None, **kwargs): index=self._name, body=self.to_dict(), **kwargs ) - async def is_closed(self, using=None): + async def is_closed(self, using: Any = None) -> Any: state = await (await self._get_connection(using)).cluster.state( index=self._name, metric="metadata" ) return state["metadata"]["indices"][self._name]["state"] == "close" - async def save(self, using=None): + async def save(self, using: Any = None) -> Any: """ Sync the index definition with opensearch, creating the index if it doesn't exist and updating its settings and mappings if it does. @@ -334,7 +345,7 @@ async def save(self, using=None): if mappings: await self.put_mapping(using=using, body=mappings) - async def analyze(self, using=None, **kwargs): + async def analyze(self, using: Any = None, **kwargs: Any) -> Any: """ Perform the analysis process on a text and return the tokens breakdown of the text. @@ -346,7 +357,7 @@ async def analyze(self, using=None, **kwargs): index=self._name, **kwargs ) - async def refresh(self, using=None, **kwargs): + async def refresh(self, using: Any = None, **kwargs: Any) -> Any: """ Performs a refresh operation on the index. @@ -357,7 +368,7 @@ async def refresh(self, using=None, **kwargs): index=self._name, **kwargs ) - async def flush(self, using=None, **kwargs): + async def flush(self, using: Any = None, **kwargs: Any) -> Any: """ Performs a flush operation on the index. @@ -368,7 +379,7 @@ async def flush(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get(self, using=None, **kwargs): + async def get(self, using: Any = None, **kwargs: Any) -> Any: """ The get index API allows to retrieve information about the index. @@ -379,7 +390,7 @@ async def get(self, using=None, **kwargs): index=self._name, **kwargs ) - async def open(self, using=None, **kwargs): + async def open(self, using: Any = None, **kwargs: Any) -> Any: """ Opens the index in opensearch. 
@@ -390,7 +401,7 @@ async def open(self, using=None, **kwargs): index=self._name, **kwargs ) - async def close(self, using=None, **kwargs): + async def close(self, using: Any = None, **kwargs: Any) -> Any: """ Closes the index in opensearch. @@ -401,7 +412,7 @@ async def close(self, using=None, **kwargs): index=self._name, **kwargs ) - async def delete(self, using=None, **kwargs): + async def delete(self, using: Any = None, **kwargs: Any) -> Any: """ Deletes the index in opensearch. @@ -412,7 +423,7 @@ async def delete(self, using=None, **kwargs): index=self._name, **kwargs ) - async def exists(self, using=None, **kwargs): + async def exists(self, using: Any = None, **kwargs: Any) -> Any: """ Returns ``True`` if the index already exists in opensearch. @@ -423,7 +434,7 @@ async def exists(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_mapping(self, using=None, **kwargs): + async def put_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Register specific mapping definition for a specific type. @@ -434,7 +445,7 @@ async def put_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_mapping(self, using=None, **kwargs): + async def get_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve specific mapping definition for a specific type. @@ -445,7 +456,7 @@ async def get_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_field_mapping(self, using=None, **kwargs): + async def get_field_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve mapping definition of a specific field. @@ -456,7 +467,7 @@ async def get_field_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_alias(self, using=None, **kwargs): + async def put_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Create an alias for the index. @@ -467,7 +478,7 @@ async def put_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def exists_alias(self, using=None, **kwargs): + async def exists_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Return a boolean indicating whether given alias exists for this index. @@ -478,7 +489,7 @@ async def exists_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_alias(self, using=None, **kwargs): + async def get_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve a specified alias. @@ -489,7 +500,7 @@ async def get_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def delete_alias(self, using=None, **kwargs): + async def delete_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Delete specific alias. @@ -500,7 +511,7 @@ async def delete_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_settings(self, using=None, **kwargs): + async def get_settings(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve settings for the index. @@ -511,7 +522,7 @@ async def get_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_settings(self, using=None, **kwargs): + async def put_settings(self, using: Any = None, **kwargs: Any) -> Any: """ Change specific index level settings in real time. @@ -522,7 +533,7 @@ async def put_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - async def stats(self, using=None, **kwargs): + async def stats(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve statistics on different operations happening on the index. 
@@ -533,7 +544,7 @@ async def stats(self, using=None, **kwargs): index=self._name, **kwargs ) - async def segments(self, using=None, **kwargs): + async def segments(self, using: Any = None, **kwargs: Any) -> Any: """ Provide low level segments information that a Lucene index (shard level) is built with. @@ -545,7 +556,7 @@ async def segments(self, using=None, **kwargs): index=self._name, **kwargs ) - async def validate_query(self, using=None, **kwargs): + async def validate_query(self, using: Any = None, **kwargs: Any) -> Any: """ Validate a potentially expensive query without executing it. @@ -556,7 +567,7 @@ async def validate_query(self, using=None, **kwargs): index=self._name, **kwargs ) - async def clear_cache(self, using=None, **kwargs): + async def clear_cache(self, using: Any = None, **kwargs: Any) -> Any: """ Clear all caches or specific cached associated with the index. @@ -567,7 +578,7 @@ async def clear_cache(self, using=None, **kwargs): index=self._name, **kwargs ) - async def recovery(self, using=None, **kwargs): + async def recovery(self, using: Any = None, **kwargs: Any) -> Any: """ The indices recovery API provides insight into on-going shard recoveries for the index. @@ -579,7 +590,7 @@ async def recovery(self, using=None, **kwargs): index=self._name, **kwargs ) - async def upgrade(self, using=None, **kwargs): + async def upgrade(self, using: Any = None, **kwargs: Any) -> Any: """ Upgrade the index to the latest format. @@ -590,7 +601,7 @@ async def upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_upgrade(self, using=None, **kwargs): + async def get_upgrade(self, using: Any = None, **kwargs: Any) -> Any: """ Monitor how much of the index is upgraded. @@ -601,7 +612,7 @@ async def get_upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - async def shard_stores(self, using=None, **kwargs): + async def shard_stores(self, using: Any = None, **kwargs: Any) -> Any: """ Provides store information for shard copies of the index. Store information reports on which nodes shard copies exist, the shard copy @@ -615,7 +626,7 @@ async def shard_stores(self, using=None, **kwargs): index=self._name, **kwargs ) - async def forcemerge(self, using=None, **kwargs): + async def forcemerge(self, using: Any = None, **kwargs: Any) -> Any: """ The force merge API allows to force merging of the index through an API. The merge relates to the number of segments a Lucene index holds @@ -633,7 +644,7 @@ async def forcemerge(self, using=None, **kwargs): index=self._name, **kwargs ) - async def shrink(self, using=None, **kwargs): + async def shrink(self, using: Any = None, **kwargs: Any) -> Any: """ The shrink index API allows you to shrink an existing index into a new index with fewer primary shards. The number of primary shards in the diff --git a/opensearchpy/_async/helpers/index.pyi b/opensearchpy/_async/helpers/index.pyi deleted file mode 100644 index 6a89f0d1..00000000 --- a/opensearchpy/_async/helpers/index.pyi +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncIndexTemplate(object): ... -class AsyncIndex(object): ... 
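
The hunks above finish annotating `AsyncIndex` and drop its `.pyi` stub, making the inline signatures the single source of typing truth. As a rough illustration of the surface being typed, the following sketch drives a few of these methods. It is not part of the patch; it assumes a reachable local cluster with default credentials, and the `movies` index name is illustrative.

```python
import asyncio

from opensearchpy import AsyncOpenSearch
from opensearchpy._async.helpers.index import AsyncIndex
from opensearchpy.connection import async_connections


async def main() -> None:
    # register the "default" connection that the async helpers look up internally
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"], verify_certs=False
    )
    async_connections.add_connection("default", client)

    index = AsyncIndex("movies")
    index.settings(number_of_shards=1, number_of_replicas=0)

    if not await index.exists():    # exists(...) -> Any, awaited here
        await index.create()        # create(...) -> Any (raw response dict)
    print(await index.is_closed())  # is_closed(...) -> Any (bool-like)

    await client.close()


asyncio.run(main())
```
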
diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py index 967c74c8..dd560564 100644 --- a/opensearchpy/_async/helpers/mapping.py +++ b/opensearchpy/_async/helpers/mapping.py @@ -8,12 +8,9 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from itertools import chain +from typing import Any from six import iteritems @@ -23,25 +20,28 @@ class AsyncMapping(object): - def __init__(self): + _meta: Any + properties: Properties + + def __init__(self) -> None: self.properties = Properties() self._meta = {} - def __repr__(self): + def __repr__(self) -> str: return "Mapping()" - def _clone(self): + def _clone(self) -> Any: m = AsyncMapping() m.properties._params = self.properties._params.copy() return m @classmethod - async def from_opensearch(cls, index, using="default"): + async def from_opensearch(cls, index: Any, using: str = "default") -> Any: m = cls() await m.update_from_opensearch(index, using) return m - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: str) -> Any: field = self nested = [] parts = field_path.split(".") @@ -54,18 +54,18 @@ def resolve_nested(self, field_path): nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: field = self for step in field_path.split("."): try: field = field[step] except KeyError: - return + return None return field - def _collect_analysis(self): - analysis = {} - fields = [] + def _collect_analysis(self) -> Any: + analysis: Any = {} + fields: Any = [] if "_all" in self._meta: fields.append(Text(**self._meta["_all"])) @@ -91,20 +91,20 @@ def _collect_analysis(self): return analysis - async def save(self, index, using="default"): + async def save(self, index: Any, using: str = "default") -> Any: from opensearchpy._async.helpers.index import AsyncIndex index = AsyncIndex(index, using=using) index.mapping(self) return await index.save() - async def update_from_opensearch(self, index, using="default"): + async def update_from_opensearch(self, index: Any, using: str = "default") -> None: opensearch = await get_connection(using) raw = await opensearch.indices.get_mapping(index=index) _, raw = raw.popitem() self._update_from_dict(raw["mappings"]) - def _update_from_dict(self, raw): + def _update_from_dict(self, raw: Any) -> None: for name, definition in iteritems(raw.get("properties", {})): self.field(name, definition) @@ -116,7 +116,7 @@ def _update_from_dict(self, raw): else: self.meta(name, value) - def update(self, mapping, update_only=False): + def update(self, mapping: Any, update_only: bool = False) -> None: for name in mapping: if update_only and name in self: # nested and inner objects, merge recursively @@ -133,20 +133,20 @@ def update(self, mapping, update_only=False): else: self._meta.update(mapping._meta) - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self.properties.properties - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties.properties[name] - def __iter__(self): + def __iter__(self) -> Any: return iter(self.properties.properties) - def field(self, *args, **kwargs): + def field(self, *args: Any, **kwargs: Any) -> "AsyncMapping": self.properties.field(*args, **kwargs) return 
self - def meta(self, name, params=None, **kwargs): + def meta(self, name: Any, params: Any = None, **kwargs: Any) -> "AsyncMapping": if not name.startswith("_") and name not in META_FIELDS: name = "_" + name @@ -156,7 +156,7 @@ def meta(self, name, params=None, **kwargs): self._meta[name] = kwargs if params is None else params return self - def to_dict(self): + def to_dict(self) -> Any: meta = self._meta # hard coded serialization of analyzers in _all diff --git a/opensearchpy/_async/helpers/mapping.pyi b/opensearchpy/_async/helpers/mapping.pyi deleted file mode 100644 index 91b8d64b..00000000 --- a/opensearchpy/_async/helpers/mapping.pyi +++ /dev/null @@ -1,11 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncMapping(object): ... diff --git a/opensearchpy/_async/helpers/search.py b/opensearchpy/_async/helpers/search.py index 73c52971..d844ba29 100644 --- a/opensearchpy/_async/helpers/search.py +++ b/opensearchpy/_async/helpers/search.py @@ -9,6 +9,7 @@ # GitHub history for details. import copy +from typing import Any, Sequence from six import iteritems, string_types @@ -26,7 +27,7 @@ class AsyncSearch(Request): query = ProxyDescriptor("query") post_filter = ProxyDescriptor("post_filter") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Search request to opensearch. @@ -40,24 +41,24 @@ def __init__(self, **kwargs): super(AsyncSearch, self).__init__(**kwargs) self.aggs = AggsProxy(self) - self._sort = [] - self._source = None - self._highlight = {} - self._highlight_opts = {} - self._suggest = {} - self._script_fields = {} - self._response_class = Response + self._sort: Sequence[Any] = [] + self._source: Any = None + self._highlight: Any = {} + self._highlight_opts: Any = {} + self._suggest: Any = {} + self._script_fields: Any = {} + self._response_class: Any = Response self._query_proxy = QueryProxy(self, "query") self._post_filter_proxy = QueryProxy(self, "post_filter") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) - def __getitem__(self, n): + def __getitem__(self, n: Any) -> Any: """ Support slicing the `AsyncSearch` instance for pagination. @@ -92,7 +93,7 @@ def __getitem__(self, n): return s @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `AsyncSearch` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -113,7 +114,7 @@ def from_dict(cls, d): s.update_from_dict(d) return s - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -136,7 +137,7 @@ def _clone(self): s.aggs._params = {"aggs": self.aggs._params["aggs"].copy()} return s - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. 
""" @@ -144,7 +145,7 @@ def response_class(self, cls): s._response_class = cls return s - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "AsyncSearch": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -179,7 +180,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script_fields(self, **kwargs): + def script_fields(self, **kwargs: Any) -> Any: """ Define script fields to be calculated on hits. @@ -205,7 +206,7 @@ def script_fields(self, **kwargs): s._script_fields.update(kwargs) return s - def source(self, fields=None, **kwargs): + def source(self, fields: Any = None, **kwargs: Any) -> Any: """ Selectively control how the _source field is returned. @@ -250,7 +251,7 @@ def source(self, fields=None, **kwargs): return s - def sort(self, *keys): + def sort(self, *keys: Any) -> Any: """ Add sorting information to the search request. If called without arguments it will remove all sort requirements. Otherwise it will @@ -283,7 +284,7 @@ def sort(self, *keys): s._sort.append(k) return s - def highlight_options(self, **kwargs): + def highlight_options(self, **kwargs: Any) -> Any: """ Update the global highlighting options used for this request. For example:: @@ -295,7 +296,7 @@ def highlight_options(self, **kwargs): s._highlight_opts.update(kwargs) return s - def highlight(self, *fields, **kwargs): + def highlight(self, *fields: Any, **kwargs: Any) -> Any: """ Request highlighting of some fields. All keyword arguments passed in will be used as parameters for all the fields in the ``fields`` parameter. Example:: @@ -335,7 +336,7 @@ def highlight(self, *fields, **kwargs): s._highlight[f] = kwargs return s - def suggest(self, name, text, **kwargs): + def suggest(self, name: str, text: str, **kwargs: Any) -> Any: """ Add a suggestions request to the search. @@ -352,7 +353,7 @@ def suggest(self, name, text, **kwargs): s._suggest[name].update(kwargs) return s - def to_dict(self, count=False, **kwargs): + def to_dict(self, count: bool = False, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request's body. @@ -396,7 +397,7 @@ def to_dict(self, count=False, **kwargs): d.update(recursive_to_dict(kwargs)) return d - async def count(self): + async def count(self) -> Any: """ Return the number of hits matching the query and filters. Note that only the actual number is returned. @@ -412,7 +413,7 @@ async def count(self): "count" ] - async def execute(self, ignore_cache=False): + async def execute(self, ignore_cache: bool = False) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. @@ -431,7 +432,7 @@ async def execute(self, ignore_cache=False): ) return self._response - async def scan(self): + async def scan(self) -> Any: """ Turn the search into a scan search and return a generator that will iterate over all the documents matching the query. @@ -449,7 +450,7 @@ async def scan(self): ): yield self._get_result(hit) - async def delete(self): + async def delete(self) -> Any: """ delete() executes the query by delegating to delete_by_query() """ @@ -469,22 +470,22 @@ class AsyncMultiSearch(Request): request. 
""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(AsyncMultiSearch, self).__init__(**kwargs) - self._searches = [] + self._searches: Any = [] - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return self._searches[key] - def __iter__(self): + def __iter__(self) -> Any: return iter(self._searches) - def _clone(self): + def _clone(self) -> Any: ms = super(AsyncMultiSearch, self)._clone() ms._searches = self._searches[:] return ms - def add(self, search): + def add(self, search: Any) -> Any: """ Adds a new :class:`~opensearchpy.AsyncSearch` object to the request:: @@ -496,7 +497,7 @@ def add(self, search): ms._searches.append(search) return ms - def to_dict(self): + def to_dict(self) -> Any: out = [] for s in self._searches: meta = {} @@ -509,7 +510,9 @@ def to_dict(self): return out - async def execute(self, ignore_cache=False, raise_on_error=True): + async def execute( + self, ignore_cache: bool = False, raise_on_error: bool = True + ) -> Any: """ Execute the multi search request and return a list of search results. """ diff --git a/opensearchpy/_async/helpers/search.pyi b/opensearchpy/_async/helpers/search.pyi deleted file mode 100644 index 3413c889..00000000 --- a/opensearchpy/_async/helpers/search.pyi +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.search import Request - -class AsyncSearch(Request): ... -class AsyncMultiSearch(Request): ... diff --git a/opensearchpy/_async/helpers/test.py b/opensearchpy/_async/helpers/test.py index 895ae991..9516857c 100644 --- a/opensearchpy/_async/helpers/test.py +++ b/opensearchpy/_async/helpers/test.py @@ -10,18 +10,16 @@ import os import time +from typing import Any from unittest import SkipTest from opensearchpy import AsyncOpenSearch from opensearchpy.exceptions import ConnectionError -if "OPENSEARCH_URL" in os.environ: - OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] -else: - OPENSEARCH_URL = "https://admin:admin@localhost:9200" +OPENSEARCH_URL = os.environ.get("OPENSEARCH_URL", "https://admin:admin@localhost:9200") -async def get_test_client(nowait=False, **kwargs): +async def get_test_client(nowait: bool = False, **kwargs: Any) -> Any: # construct kwargs from the environment kw = {"timeout": 30} @@ -32,7 +30,7 @@ async def get_test_client(nowait=False, **kwargs): kw["connection_class"] = getattr(async_connection, "AIOHttpConnection") kw.update(kwargs) - client = AsyncOpenSearch(OPENSEARCH_URL, **kw) + client = AsyncOpenSearch(OPENSEARCH_URL, **kw) # type: ignore # wait for yellow status for _ in range(1 if nowait else 100): diff --git a/opensearchpy/_async/helpers/test.pyi b/opensearchpy/_async/helpers/test.pyi deleted file mode 100644 index 497d8caf..00000000 --- a/opensearchpy/_async/helpers/test.pyi +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- -from typing import Any - -from _typeshed import Incomplete - -from opensearchpy import AsyncOpenSearch as AsyncOpenSearch -from opensearchpy.exceptions import ConnectionError as ConnectionError - -OPENSEARCH_URL: Incomplete - -async def get_test_client(nowait: bool = ..., **kwargs: Any) -> Any: ... diff --git a/opensearchpy/_async/helpers/update_by_query.py b/opensearchpy/_async/helpers/update_by_query.py index fc9eef54..aeb8e3d2 100644 --- a/opensearchpy/_async/helpers/update_by_query.py +++ b/opensearchpy/_async/helpers/update_by_query.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from opensearchpy.connection.async_connections import get_connection from opensearchpy.helpers.query import Bool, Q from opensearchpy.helpers.response import UpdateByQueryResponse @@ -18,7 +20,7 @@ class AsyncUpdateByQuery(Request): query = ProxyDescriptor("query") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Update by query request to opensearch. @@ -32,17 +34,17 @@ def __init__(self, **kwargs): """ super(AsyncUpdateByQuery, self).__init__(**kwargs) self._response_class = UpdateByQueryResponse - self._script = {} + self._script: Any = {} self._query_proxy = QueryProxy(self, "query") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `AsyncUpdateByQuery` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -63,7 +65,7 @@ def from_dict(cls, d): u.update_from_dict(d) return u - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -76,7 +78,7 @@ def _clone(self): ubq.query._proxied = self.query._proxied return ubq - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -84,7 +86,7 @@ def response_class(self, cls): ubq._response_class = cls return ubq - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "AsyncUpdateByQuery": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -97,7 +99,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script(self, **kwargs): + def script(self, **kwargs: Any) -> Any: """ Define update action to take: @@ -118,7 +120,7 @@ def script(self, **kwargs): ubq._script.update(kwargs) return ubq - def to_dict(self, **kwargs): + def to_dict(self, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request'ubq body. @@ -136,7 +138,7 @@ def to_dict(self, **kwargs): d.update(recursive_to_dict(kwargs)) return d - async def execute(self): + async def execute(self) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. 
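
`AsyncUpdateByQuery` is annotated in the same pattern: chainable builders return `Any` (or the class itself, as with `update_from_dict`), while `execute()` stays awaitable. Below is a minimal usage sketch, not part of the patch, assuming a `movies` index and a registered default async connection; the field name and painless script are illustrative.

```python
import asyncio

from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery


async def main() -> None:
    ubq = (
        AsyncUpdateByQuery(index="movies")
        .filter("term", director="nolan")   # filter(*args, **kwargs) -> Any
        .script(                            # script(**kwargs) -> Any
            source="ctx._source.views += params.n",
            params={"n": 1},
        )
    )
    response = await ubq.execute()          # execute() -> Any (UpdateByQueryResponse)
    print(response.success())


asyncio.run(main())
```
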
diff --git a/opensearchpy/_async/helpers/update_by_query.pyi b/opensearchpy/_async/helpers/update_by_query.pyi deleted file mode 100644 index 57d692c6..00000000 --- a/opensearchpy/_async/helpers/update_by_query.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.search import Request - -class AsyncUpdateByQuery(Request): ... diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index cab7782e..34819970 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -30,8 +30,9 @@ import os import ssl import warnings +from typing import Any, Collection, Mapping, Optional, Union -import urllib3 # type: ignore +import urllib3 from ..compat import reraise_exceptions, urlencode from ..connection.base import Connection @@ -41,12 +42,9 @@ ImproperlyConfigured, SSLError, ) -from ._extra_imports import aiohttp, aiohttp_exceptions, yarl +from ._extra_imports import aiohttp, aiohttp_exceptions, yarl # type: ignore from .compat import get_running_loop -# sentinel value for `verify_certs`. -# This is used to detect if a user is passing in a value -# for SSL kwargs if also using an SSLContext. VERIFY_CERTS_DEFAULT = object() SSL_SHOW_WARN_DEFAULT = object() @@ -56,45 +54,48 @@ class AsyncConnection(Connection): async def perform_request( self, - method, - url, - params=None, - body=None, - timeout=None, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: raise NotImplementedError() - async def close(self): + async def close(self) -> None: raise NotImplementedError() class AIOHttpConnection(AsyncConnection): + session: Optional[aiohttp.ClientSession] + ssl_assert_fingerprint: Optional[str] + def __init__( self, - host="localhost", - port=None, - url_prefix="", - timeout=10, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_fingerprint=None, - maxsize=10, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - loop=None, - trust_env=False, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + url_prefix: str = "", + timeout: int = 10, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_fingerprint: Any = None, + maxsize: Optional[int] = 10, + headers: Any = None, + ssl_context: Any = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + loop: Any = None, + trust_env: Optional[bool] = False, + **kwargs: Any + ) -> None: """ Default connection class for ``AsyncOpenSearch`` using the `aiohttp` library and the http protocol. 
@@ -224,8 +225,15 @@ def __init__( self._trust_env = trust_env async def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: if self.session is None: await self._create_aiohttp_session() assert self.session is not None @@ -346,14 +354,14 @@ async def perform_request( return response.status, response.headers, raw_data - async def close(self): + async def close(self) -> Any: """ Explicitly closes connection """ if self.session: await self.session.close() - async def _create_aiohttp_session(self): + async def _create_aiohttp_session(self) -> Any: """Creates an aiohttp.ClientSession(). This is delayed until the first call to perform_request() so that AsyncTransport has a chance to set AIOHttpConnection.loop @@ -374,9 +382,9 @@ async def _create_aiohttp_session(self): ) -class OpenSearchClientResponse(aiohttp.ClientResponse): - async def text(self, encoding=None, errors="strict"): +class OpenSearchClientResponse(aiohttp.ClientResponse): # type: ignore + async def text(self, encoding: Any = None, errors: str = "strict") -> Any: if self._body is None: await self.read() - return self._body.decode("utf-8", "surrogatepass") + return self._body.decode("utf-8", "surrogatepass") # type: ignore diff --git a/opensearchpy/_async/http_aiohttp.pyi b/opensearchpy/_async/http_aiohttp.pyi deleted file mode 100644 index d641a5eb..00000000 --- a/opensearchpy/_async/http_aiohttp.pyi +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from asyncio import AbstractEventLoop -from typing import Any, Collection, Mapping, Optional, Tuple, Union - -from ..connection import Connection -from ._extra_imports import aiohttp # type: ignore - -class AsyncConnection(Connection): - async def perform_request( # type: ignore - self, - method: str, - url: str, - params: Optional[Mapping[str, Any]] = ..., - body: Optional[bytes] = ..., - timeout: Optional[Union[int, float]] = ..., - ignore: Collection[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ) -> Tuple[int, Mapping[str, str], str]: ... - async def close(self) -> None: ... 
- -class AIOHttpConnection(AsyncConnection): - session: Optional[aiohttp.ClientSession] - ssl_assert_fingerprint: Optional[str] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - url_prefix: str = ..., - timeout: int = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: int = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - loop: Optional[AbstractEventLoop] = ..., - trust_env: bool = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/_async/plugins/__init__.pyi b/opensearchpy/_async/plugins/__init__.pyi deleted file mode 100644 index 22c54ac8..00000000 --- a/opensearchpy/_async/plugins/__init__.pyi +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. diff --git a/opensearchpy/_async/plugins/alerting.py b/opensearchpy/_async/plugins/alerting.py index be79ed02..f1cf3ac9 100644 --- a/opensearchpy/_async/plugins/alerting.py +++ b/opensearchpy/_async/plugins/alerting.py @@ -8,12 +8,19 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any, Union + from ..client.utils import NamespacedClient, _make_path, query_params class AlertingClient(NamespacedClient): @query_params() - async def search_monitor(self, body, params=None, headers=None): + async def search_monitor( + self, + body: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the search result for a monitor. @@ -28,7 +35,12 @@ async def search_monitor(self, body, params=None, headers=None): ) @query_params() - async def get_monitor(self, monitor_id, params=None, headers=None): + async def get_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the details of a specific monitor. @@ -42,7 +54,12 @@ async def get_monitor(self, monitor_id, params=None, headers=None): ) @query_params("dryrun") - async def run_monitor(self, monitor_id, params=None, headers=None): + async def run_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Runs/Executes a specific monitor. @@ -57,7 +74,12 @@ async def run_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - async def create_monitor(self, body=None, params=None, headers=None): + async def create_monitor( + self, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Creates a monitor with inputs, triggers, and actions. 
@@ -72,7 +94,13 @@ async def create_monitor(self, body=None, params=None, headers=None): ) @query_params() - async def update_monitor(self, monitor_id, body=None, params=None, headers=None): + async def update_monitor( + self, + monitor_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Updates a monitor's inputs, triggers, and actions. @@ -88,7 +116,12 @@ async def update_monitor(self, monitor_id, body=None, params=None, headers=None) ) @query_params() - async def delete_monitor(self, monitor_id, params=None, headers=None): + async def delete_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Deletes a specific monitor. @@ -102,7 +135,12 @@ async def delete_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - async def get_destination(self, destination_id=None, params=None, headers=None): + async def get_destination( + self, + destination_id: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the details of a specific destination. @@ -118,7 +156,12 @@ async def get_destination(self, destination_id=None, params=None, headers=None): ) @query_params() - async def create_destination(self, body=None, params=None, headers=None): + async def create_destination( + self, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Creates a destination for slack, mail, or custom-webhook. @@ -134,8 +177,12 @@ async def create_destination(self, body=None, params=None, headers=None): @query_params() async def update_destination( - self, destination_id, body=None, params=None, headers=None - ): + self, + destination_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Updates a destination's inputs, triggers, and actions. @@ -151,7 +198,12 @@ async def update_destination( ) @query_params() - async def delete_destination(self, destination_id, params=None, headers=None): + async def delete_destination( + self, + destination_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Deletes a specific destination. @@ -165,7 +217,9 @@ async def delete_destination(self, destination_id, params=None, headers=None): ) @query_params() - async def get_alerts(self, params=None, headers=None): + async def get_alerts( + self, params: Union[Any, None] = None, headers: Union[Any, None] = None + ) -> Union[bool, Any]: """ Returns all alerts. @@ -178,7 +232,13 @@ async def get_alerts(self, params=None, headers=None): ) @query_params() - async def acknowledge_alert(self, monitor_id, body=None, params=None, headers=None): + async def acknowledge_alert( + self, + monitor_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Acknowledges an alert. 
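
With the alerting plugin client now carrying explicit `Union[Any, None]` parameter and `Union[bool, Any]` return annotations, calls read the same as before; the annotations only surface in tooling. A short sketch follows, not part of the patch, assuming a cluster with the Alerting plugin installed; the monitor-name query body is illustrative.

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(
        hosts=["https://admin:admin@localhost:9200"], verify_certs=False
    )

    # every alerting call is awaited; each is annotated -> Union[bool, Any]
    alerts = await client.plugins.alerting.get_alerts()
    print(alerts)

    # search monitors by name (query shape per the Alerting plugin API)
    found = await client.plugins.alerting.search_monitor(
        body={"query": {"match": {"monitor.name": "test-monitor"}}}
    )
    print(found)

    await client.close()


asyncio.run(main())
```
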
diff --git a/opensearchpy/_async/plugins/alerting.pyi b/opensearchpy/_async/plugins/alerting.pyi deleted file mode 100644 index 7629df93..00000000 --- a/opensearchpy/_async/plugins/alerting.pyi +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient - -class AlertingClient(NamespacedClient): - def search_monitor( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def run_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_monitor( - self, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_monitor( - self, - monitor_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_destination( - self, - destination_id: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_destination( - self, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_destination( - self, - destination_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_destination( - self, - destination_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_alerts( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def acknowledge_alert( - self, - monitor_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/plugins/index_management.py b/opensearchpy/_async/plugins/index_management.py index ea654bc2..bbca4e2f 100644 --- a/opensearchpy/_async/plugins/index_management.py +++ b/opensearchpy/_async/plugins/index_management.py @@ -9,12 +9,16 @@ # GitHub history for details. +from typing import Any + from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndexManagementClient(NamespacedClient): @query_params() - async def put_policy(self, policy, body=None, params=None, headers=None): + async def put_policy( + self, policy: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates, or updates, a policy. 
@@ -32,7 +36,9 @@ async def put_policy(self, policy, body=None, params=None, headers=None): ) @query_params() - async def add_policy(self, index, body=None, params=None, headers=None): + async def add_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Adds a policy to an index. This operation does not change the policy if the index already has one. @@ -50,7 +56,9 @@ async def add_policy(self, index, body=None, params=None, headers=None): ) @query_params() - async def get_policy(self, policy, params=None, headers=None): + async def get_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Gets the policy by `policy_id`. @@ -67,7 +75,9 @@ async def get_policy(self, policy, params=None, headers=None): ) @query_params() - async def remove_policy_from_index(self, index, params=None, headers=None): + async def remove_policy_from_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Removes any ISM policy from the index. @@ -84,7 +94,9 @@ async def remove_policy_from_index(self, index, params=None, headers=None): ) @query_params() - async def change_policy(self, index, body=None, params=None, headers=None): + async def change_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates the managed index policy to a new policy (or to a new version of the policy). @@ -102,7 +114,9 @@ async def change_policy(self, index, body=None, params=None, headers=None): ) @query_params() - async def retry(self, index, body=None, params=None, headers=None): + async def retry( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Retries the failed action for an index. @@ -120,7 +134,9 @@ async def retry(self, index, body=None, params=None, headers=None): ) @query_params("show_policy") - async def explain_index(self, index, params=None, headers=None): + async def explain_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Gets the current state of the index. @@ -137,7 +153,9 @@ async def explain_index(self, index, params=None, headers=None): ) @query_params() - async def delete_policy(self, policy, params=None, headers=None): + async def delete_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes the policy by `policy_id`. diff --git a/opensearchpy/_async/plugins/index_management.pyi b/opensearchpy/_async/plugins/index_management.pyi deleted file mode 100644 index 98d50097..00000000 --- a/opensearchpy/_async/plugins/index_management.pyi +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class IndexManagementClient(NamespacedClient): - async def put_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def add_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... 
- async def get_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def remove_policy_from_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def change_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def retry( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def explain_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def delete_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py index bf1b77d6..854f0a06 100644 --- a/opensearchpy/_async/transport.py +++ b/opensearchpy/_async/transport.py @@ -30,6 +30,10 @@ import logging import sys from itertools import chain +from typing import Any, Collection, Mapping, Optional, Type, Union + +from opensearchpy.connection.base import Connection +from opensearchpy.serializer import Serializer from ..connection_pool import ConnectionPool from ..exceptions import ( @@ -56,25 +60,27 @@ class AsyncTransport(Transport): DEFAULT_CONNECTION_CLASS = AIOHttpConnection + sniffing_task: Any = None + def __init__( self, - hosts, - connection_class=None, - connection_pool_class=ConnectionPool, - host_info_callback=get_host_info, - sniff_on_start=False, - sniffer_timeout=None, - sniff_timeout=0.1, - sniff_on_connection_fail=False, - serializer=JSONSerializer(), - serializers=None, - default_mimetype="application/json", - max_retries=3, - retry_on_status=(502, 503, 504), - retry_on_timeout=False, - send_get_body_as="GET", - **kwargs - ): + hosts: Any, + connection_class: Any = None, + connection_pool_class: Type[ConnectionPool] = ConnectionPool, + host_info_callback: Any = get_host_info, + sniff_on_start: bool = False, + sniffer_timeout: Any = None, + sniff_timeout: float = 0.1, + sniff_on_connection_fail: bool = False, + serializer: Serializer = JSONSerializer(), + serializers: Any = None, + default_mimetype: str = "application/json", + max_retries: int = 3, + retry_on_status: Any = (502, 503, 504), + retry_on_timeout: bool = False, + send_get_body_as: str = "GET", + **kwargs: Any + ) -> None: """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance @@ -113,9 +119,9 @@ def __init__( options provided as part of the hosts parameter. """ self.sniffing_task = None - self.loop = None + self.loop: Any = None self._async_init_called = False - self._sniff_on_start_event = None # type: asyncio.Event + self._sniff_on_start_event: Optional[asyncio.Event] = None super(AsyncTransport, self).__init__( hosts=[], @@ -142,7 +148,7 @@ def __init__( self.hosts = hosts self.sniff_on_start = sniff_on_start - async def _async_init(self): + async def _async_init(self) -> None: """This is our stand-in for an async constructor. Everything that was deferred within __init__() should be done here now. @@ -171,7 +177,7 @@ async def _async_init(self): # Since this is the first one we wait for it to complete # in case there's an error it'll get raised here. 
- await self.sniffing_task + await self.sniffing_task # type: ignore # If the task gets cancelled here it likely means the # transport got closed. @@ -184,7 +190,7 @@ async def _async_init(self): finally: self._sniff_on_start_event.set() - async def _async_call(self): + async def _async_call(self) -> None: """This method is called within any async method of AsyncTransport where the transport is not closing. This will check to see if we should call our _async_init() or create a new sniffing task @@ -205,7 +211,7 @@ async def _async_call(self): if self.loop.time() >= self.last_sniff + self.sniffer_timeout: self.create_sniff_task() - async def _get_node_info(self, conn, initial): + async def _get_node_info(self, conn: Any, initial: Any) -> Any: try: # use small timeout for the sniffing request, should be a fast api call _, headers, node_info = await conn.perform_request( @@ -218,7 +224,7 @@ async def _get_node_info(self, conn, initial): pass return None - async def _get_sniff_data(self, initial=False): + async def _get_sniff_data(self, initial: Any = False) -> Any: previous_sniff = self.last_sniff # reset last_sniff timestamp @@ -227,7 +233,7 @@ async def _get_sniff_data(self, initial=False): # use small timeout for the sniffing request, should be a fast api call timeout = self.sniff_timeout if not initial else None - def _sniff_request(conn): + def _sniff_request(conn: Any) -> Any: return self.loop.create_task( conn.perform_request("GET", "/_nodes/_all/http", timeout=timeout) ) @@ -243,7 +249,7 @@ def _sniff_request(conn): continue tasks.append(_sniff_request(conn)) - done = () + done: Any = () try: while tasks: # The 'loop' keyword is deprecated in 3.8+ so don't @@ -283,7 +289,7 @@ def _sniff_request(conn): for task in chain(done, tasks): task.cancel() - async def sniff_hosts(self, initial=False): + async def sniff_hosts(self, initial: bool = False) -> Any: """Either spawns a sniffing_task which does regular sniffing over time or does a single sniffing session and awaits the results. """ @@ -294,7 +300,7 @@ async def sniff_hosts(self, initial=False): return node_info = await self._get_sniff_data(initial) - hosts = list(filter(None, (self._get_host_info(n) for n in node_info))) + hosts: Any = list(filter(None, (self._get_host_info(n) for n in node_info))) # we weren't able to get any nodes, maybe using an incompatible # transport_schema or host_info_callback blocked all - raise error. @@ -311,7 +317,7 @@ async def sniff_hosts(self, initial=False): if c not in self.connection_pool.connections: await c.close() - def create_sniff_task(self, initial=False): + def create_sniff_task(self, initial: bool = False) -> None: """ Initiate a sniffing task. Make sure we only have one sniff request running at any given time. If a finished sniffing request is around, @@ -327,7 +333,7 @@ def create_sniff_task(self, initial=False): if self.sniffing_task is None: self.sniffing_task = self.loop.create_task(self.sniff_hosts(initial)) - def mark_dead(self, connection): + def mark_dead(self, connection: Connection) -> None: """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. 
@@ -338,10 +344,19 @@ def mark_dead(self, connection): if self.sniff_on_connection_fail: self.create_sniff_task() - def get_connection(self): + def get_connection(self) -> Any: return self.connection_pool.get_connection() - async def perform_request(self, method, url, headers=None, params=None, body=None): + async def perform_request( + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: """ Perform the actual request. Retrieve a connection from the connection pool, pass all the information to its perform_request method and @@ -425,7 +440,7 @@ async def perform_request(self, method, url, headers=None, params=None, body=Non ) return data - async def close(self): + async def close(self) -> None: """ Explicitly closes connections """ @@ -439,3 +454,6 @@ async def close(self): for connection in self.connection_pool.connections: await connection.close() + + +__all__ = ["TransportError"] diff --git a/opensearchpy/_async/transport.pyi b/opensearchpy/_async/transport.pyi deleted file mode 100644 index 5d66514d..00000000 --- a/opensearchpy/_async/transport.pyi +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union - -from ..connection import Connection -from ..connection_pool import ConnectionPool -from ..serializer import Deserializer, Serializer - -class AsyncTransport(object): - DEFAULT_CONNECTION_CLASS: Type[Connection] - connection_pool: ConnectionPool - deserializer: Deserializer - - max_retries: int - retry_on_timeout: bool - retry_on_status: Collection[int] - send_get_body_as: str - serializer: Serializer - connection_pool_class: Type[ConnectionPool] - connection_class: Type[Connection] - kwargs: Any - hosts: Optional[List[Dict[str, Any]]] - seed_connections: List[Connection] - sniffer_timeout: Optional[float] - sniff_on_start: bool - sniff_on_connection_fail: bool - last_sniff: float - sniff_timeout: Optional[float] - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Dict[str, Any] - ] - def __init__( - self, - hosts: Any, - connection_class: Optional[Type[Any]] = ..., - connection_pool_class: Type[ConnectionPool] = ..., - host_info_callback: Callable[ - [Dict[str, Any], Dict[str, Any]], Optional[Dict[str, Any]] - ] = ..., - sniff_on_start: bool = ..., - sniffer_timeout: Optional[float] = ..., - sniff_timeout: float = ..., - sniff_on_connection_fail: bool = ..., - serializer: Serializer = ..., - serializers: Optional[Mapping[str, Serializer]] = ..., - default_mimetype: str = ..., - max_retries: int = ..., - retry_on_status: Collection[int] = ..., - retry_on_timeout: bool = ..., - send_get_body_as: str = ..., - **kwargs: Any - ) -> None: ... - def add_connection(self, host: Any) -> None: ... - def set_connections(self, hosts: Collection[Any]) -> None: ... - def get_connection(self) -> Connection: ... - def sniff_hosts(self, initial: bool = ...) -> None: ... - def mark_dead(self, connection: Connection) -> None: ... - async def perform_request( - self, - method: str, - url: str, - headers: Optional[Mapping[str, str]] = ..., - params: Optional[Mapping[str, Any]] = ..., - body: Optional[Any] = ..., - ) -> Union[bool, Any]: ... - async def close(self) -> None: ... diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index 2410b9f5..13c8d5c9 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -25,4 +25,4 @@ # specific language governing permissions and limitations # under the License. -__versionstr__ = "2.3.2" +__versionstr__: str = "2.3.2" diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 6a8dffb6..05af6764 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -39,9 +39,11 @@ from __future__ import unicode_literals import logging +from typing import Any, Type from ..transport import Transport, TransportError from .cat import CatClient +from .client import Client from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient @@ -54,12 +56,12 @@ from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient -from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params +from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") -class OpenSearch(object): +class OpenSearch(Client): """ OpenSearch client. Provides a straightforward mapping from Python to OpenSearch REST endpoints. 
@@ -184,13 +186,19 @@ def default(self, obj): """ - from ._patch import ( + # include PIT functions inside _patch.py + from ._patch import ( # type: ignore create_point_in_time, delete_point_in_time, list_all_point_in_time, ) - def __init__(self, hosts=None, transport_class=Transport, **kwargs): + def __init__( + self, + hosts: Any = None, + transport_class: Type[Transport] = Transport, + **kwargs: Any + ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. Node should be a dictionary ({"host": "localhost", "port": 9200}), @@ -205,7 +213,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be :class:`~opensearchpy.Transport` class and, subsequently, to the :class:`~opensearchpy.Connection` instances. """ - self.transport = transport_class(_normalize_hosts(hosts), **kwargs) + super().__init__(hosts, transport_class, **kwargs) # namespaced clients for compatibility with API names self.cat = CatClient(self) @@ -224,10 +232,10 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.plugins = PluginsClient(self) - def __repr__(self): + def __repr__(self) -> Any: try: # get a list of all connections - cons = self.transport.hosts + cons: Any = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] @@ -236,21 +244,25 @@ def __repr__(self): # probably operating on custom transport and connection_pool, ignore return super(OpenSearch, self).__repr__() - def __enter__(self): + def __enter__(self) -> Any: if hasattr(self.transport, "_async_call"): self.transport._async_call() return self - def __exit__(self, *_): + def __exit__(self, *_: Any) -> None: self.close() - def close(self): + def close(self) -> None: """Closes the Transport and all internal connections""" self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params() - def ping(self, params=None, headers=None): + def ping( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns whether the cluster is running. @@ -263,7 +275,11 @@ def ping(self, params=None, headers=None): return False @query_params() - def info(self, params=None, headers=None): + def info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the cluster. @@ -281,7 +297,14 @@ def info(self, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def create(self, index, id, body, params=None, headers=None): + def create( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. @@ -330,7 +353,14 @@ def create(self, index, id, body, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def index(self, index, body, id=None, params=None, headers=None): + def index( + self, + index: Any, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a document in an index. @@ -387,7 +417,13 @@ def index(self, index, body, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def bulk(self, body, index=None, params=None, headers=None): + def bulk( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to perform multiple index/update/delete operations in a single request. 
@@ -431,7 +467,13 @@ def bulk(self, body, index=None, params=None, headers=None): ) @query_params() - def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): + def clear_scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Explicitly clears the search context for a scroll. @@ -467,7 +509,13 @@ def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): "routing", "terminate_after", ) - def count(self, body=None, index=None, params=None, headers=None): + def count( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns number of documents matching a query. @@ -523,7 +571,13 @@ def count(self, body=None, index=None, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def delete(self, index, id, params=None, headers=None): + def delete( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes a document from the index. @@ -592,7 +646,13 @@ def delete(self, index, id, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def delete_by_query(self, index, body, params=None, headers=None): + def delete_by_query( + self, + index: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes documents matching the provided query. @@ -685,7 +745,12 @@ def delete_by_query(self, index, body, params=None, headers=None): ) @query_params("requests_per_second") - def delete_by_query_rethrottle(self, task_id, params=None, headers=None): + def delete_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Delete By Query operation. @@ -706,7 +771,12 @@ def delete_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_script(self, id, params=None, headers=None): + def delete_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a script. @@ -738,7 +808,13 @@ def delete_script(self, id, params=None, headers=None): "version", "version_type", ) - def exists(self, index, id, params=None, headers=None): + def exists( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document exists in an index. @@ -783,7 +859,13 @@ def exists(self, index, id, params=None, headers=None): "version", "version_type", ) - def exists_source(self, index, id, params=None, headers=None): + def exists_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document source exists in an index. @@ -831,7 +913,14 @@ def exists_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - def explain(self, index, id, body=None, params=None, headers=None): + def explain( + self, + index: Any, + id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about why a specific matches (or doesn't match) a query. 
@@ -878,7 +967,13 @@ def explain(self, index, id, body=None, params=None, headers=None): "ignore_unavailable", "include_unmapped", ) - def field_caps(self, body=None, index=None, params=None, headers=None): + def field_caps( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about the capabilities of fields among multiple indices. @@ -919,7 +1014,13 @@ def field_caps(self, body=None, index=None, params=None, headers=None): "version", "version_type", ) - def get(self, index, id, params=None, headers=None): + def get( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a document. @@ -954,7 +1055,12 @@ def get(self, index, id, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout") - def get_script(self, id, params=None, headers=None): + def get_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a script. @@ -984,7 +1090,13 @@ def get_script(self, id, params=None, headers=None): "version", "version_type", ) - def get_source(self, index, id, params=None, headers=None): + def get_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the source of a document. @@ -1028,7 +1140,13 @@ def get_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - def mget(self, body, index=None, params=None, headers=None): + def mget( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to get multiple documents in one request. @@ -1073,7 +1191,13 @@ def mget(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def msearch(self, body, index=None, params=None, headers=None): + def msearch( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search operations in one request. @@ -1125,7 +1249,13 @@ def msearch(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def msearch_template(self, body, index=None, params=None, headers=None): + def msearch_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search template operations in one request. @@ -1173,7 +1303,13 @@ def msearch_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - def mtermvectors(self, body=None, index=None, params=None, headers=None): + def mtermvectors( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns multiple termvectors in one request. @@ -1221,7 +1357,14 @@ def mtermvectors(self, body=None, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def put_script(self, id, body, context=None, params=None, headers=None): + def put_script( + self, + id: Any, + body: Any, + context: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a script. 
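To illustrate the `put_script` signature above, a sketch that stores a Painless script and reads it back with `get_script`; the script id and source are assumptions made for the example:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False
)

# Store a script under an id, then fetch it back.
client.put_script(
    id="year-filter",
    body={"script": {"lang": "painless", "source": "doc['year'].value > params.year"}},
)
stored = client.get_script(id="year-filter")
print(stored["script"]["source"])
```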
@@ -1251,7 +1394,13 @@ def put_script(self, id, body, context=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type" ) - def rank_eval(self, body, index=None, params=None, headers=None): + def rank_eval( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to evaluate the quality of ranked search results over a set of typical search queries. @@ -1293,7 +1442,12 @@ def rank_eval(self, body, index=None, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def reindex(self, body, params=None, headers=None): + def reindex( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or @@ -1330,7 +1484,12 @@ def reindex(self, body, params=None, headers=None): ) @query_params("requests_per_second") - def reindex_rethrottle(self, task_id, params=None, headers=None): + def reindex_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Reindex operation. @@ -1350,7 +1509,13 @@ def reindex_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - def render_search_template(self, body=None, id=None, params=None, headers=None): + def render_search_template( + self, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1367,7 +1532,12 @@ def render_search_template(self, body=None, id=None, params=None, headers=None): ) @query_params() - def scripts_painless_execute(self, body=None, params=None, headers=None): + def scripts_painless_execute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows an arbitrary script to be executed and a result to be returned. @@ -1383,7 +1553,13 @@ def scripts_painless_execute(self, body=None, params=None, headers=None): ) @query_params("rest_total_hits_as_int", "scroll") - def scroll(self, body=None, scroll_id=None, params=None, headers=None): + def scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to retrieve a large numbers of results from a single search request. @@ -1452,7 +1628,13 @@ def scroll(self, body=None, scroll_id=None, params=None, headers=None): "typed_keys", "version", ) - def search(self, body=None, index=None, params=None, headers=None): + def search( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns results matching a query. @@ -1572,7 +1754,12 @@ def search(self, body=None, index=None, params=None, headers=None): "preference", "routing", ) - def search_shards(self, index=None, params=None, headers=None): + def search_shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the indices and shards that a search request would be executed against. 
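A quick sketch of `search_shards`, whose signature is typed above: it reports where a search would execute without running a query. The `movies` index is an assumption, and the response layout shown in the comments reflects the standard `_search_shards` shape:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False
)

response = client.search_shards(index="movies")
for copies in response["shards"]:  # one list per shard
    for shard in copies:           # one entry per copy (primary or replica)
        print(shard["index"], shard["shard"], shard["primary"], shard["state"])
```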
@@ -1613,7 +1800,13 @@ def search_shards(self, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def search_template(self, body, index=None, params=None, headers=None): + def search_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. @@ -1675,7 +1868,14 @@ def search_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - def termvectors(self, index, body=None, id=None, params=None, headers=None): + def termvectors( + self, + index: Any, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information and statistics about terms in the fields of a particular document. @@ -1730,7 +1930,14 @@ def termvectors(self, index, body=None, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def update(self, index, id, body, params=None, headers=None): + def update( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a document with a script or partial document. @@ -1812,7 +2019,13 @@ def update(self, index, id, body, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def update_by_query(self, index, body=None, params=None, headers=None): + def update_by_query( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. @@ -1906,7 +2119,12 @@ def update_by_query(self, index, body=None, params=None, headers=None): ) @query_params("requests_per_second") - def update_by_query_rethrottle(self, task_id, params=None, headers=None): + def update_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Update By Query operation. @@ -1927,7 +2145,11 @@ def update_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - def get_script_context(self, params=None, headers=None): + def get_script_context( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all script contexts. @@ -1937,7 +2159,11 @@ def get_script_context(self, params=None, headers=None): ) @query_params() - def get_script_languages(self, params=None, headers=None): + def get_script_languages( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns available script types, languages and contexts. @@ -1953,7 +2179,12 @@ def get_script_languages(self, params=None, headers=None): "preference", "routing", ) - def create_pit(self, index, params=None, headers=None): + def create_pit( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates point in time context. @@ -1981,7 +2212,11 @@ def create_pit(self, index, params=None, headers=None): ) @query_params() - def delete_all_pits(self, params=None, headers=None): + def delete_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes all active point in time searches. 
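The point-in-time methods typed above (`create_pit`, `get_all_pits`, `delete_all_pits`) compose as in this sketch; the index name and `keep_alive` value are illustrative choices:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"], use_ssl=True, verify_certs=False
)

# Open a PIT context, list the active contexts, then clear them all.
pit = client.create_pit(index="movies", keep_alive="1m")
print(pit["pit_id"])
print(client.get_all_pits())
client.delete_all_pits()
```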
@@ -1991,7 +2226,12 @@ def delete_all_pits(self, params=None, headers=None): ) @query_params() - def delete_pit(self, body=None, params=None, headers=None): + def delete_pit( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes one or more point in time searches based on the IDs passed. @@ -2007,7 +2247,11 @@ def delete_pit(self, body=None, params=None, headers=None): ) @query_params() - def get_all_pits(self, params=None, headers=None): + def get_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Lists all active point in time searches. diff --git a/opensearchpy/client/__init__.pyi b/opensearchpy/client/__init__.pyi deleted file mode 100644 index 9ad72a83..00000000 --- a/opensearchpy/client/__init__.pyi +++ /dev/null @@ -1,1139 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from __future__ import unicode_literals - -import logging -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -from ..transport import Transport -from .cat import CatClient -from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient -from .features import FeaturesClient -from .indices import IndicesClient -from .ingest import IngestClient -from .nodes import NodesClient -from .remote import RemoteClient -from .remote_store import RemoteStoreClient -from .security import SecurityClient -from .snapshot import SnapshotClient -from .tasks import TasksClient - -logger: logging.Logger - -class OpenSearch(object): - transport: Transport - - cat: CatClient - cluster: ClusterClient - features: FeaturesClient - indices: IndicesClient - ingest: IngestClient - nodes: NodesClient - remote: RemoteClient - security: SecurityClient - snapshot: SnapshotClient - tasks: TasksClient - remote_store: RemoteStoreClient - def __init__( - self, - hosts: Any = ..., - transport_class: Type[Transport] = ..., - **kwargs: Any, - ) -> None: ... 
- def __repr__(self) -> str: ... - def __enter__(self) -> "OpenSearch": ... - def __exit__(self, *_: Any) -> None: ... - def close(self) -> None: ... - # AUTO-GENERATED-API-DEFINITIONS # - def ping( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create( - self, - index: Any, - id: Any, - *, - body: Any, - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def index( - self, - index: Any, - *, - body: Any, - id: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - op_type: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def bulk( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clear_scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def count( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - min_score: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete( - self, - index: Any, - id: Any, - *, - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_by_query( - self, - index: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def exists_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- def explain( - self, - index: Any, - id: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - lenient: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def field_caps( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_unmapped: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_script( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def mget( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def msearch( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def msearch_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def mtermvectors( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - ids: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_script( - self, - id: Any, - *, - body: Any, - context: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def rank_eval( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - search_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def reindex( - self, - *, - body: Any, - max_docs: Optional[Any] = ..., - refresh: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - scroll: Optional[Any] = ..., - slices: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reindex_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def render_search_template( - self, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def scripts_painless_execute( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - scroll: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def search( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - allow_partial_search_results: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - batched_reduce_size: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - docvalue_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - seq_no_primary_term: Optional[Any] = ..., - size: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - suggest_field: Optional[Any] = ..., - suggest_mode: Optional[Any] = ..., - suggest_size: Optional[Any] = ..., - suggest_text: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - track_scores: Optional[Any] = ..., - track_total_hits: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - version: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def search_shards( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def search_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - preference: Optional[Any] = ..., - profile: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def termvectors( - self, - index: Any, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def update( - self, - index: Any, - id: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - lang: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - retry_on_conflict: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_by_query( - self, - index: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def update_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_script_context( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_script_languages( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_pit( - self, - index: Any, - *, - allow_partial_pit_creation: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_all_pits( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_pit( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_all_pits( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/_patch.py b/opensearchpy/client/_patch.py index bbb69d52..3f156906 100644 --- a/opensearchpy/client/_patch.py +++ b/opensearchpy/client/_patch.py @@ -9,12 +9,13 @@ # GitHub history for details. import warnings +from typing import Any from .utils import SKIP_IN_PATH, query_params @query_params() -def list_all_point_in_time(self, params=None, headers=None): +def list_all_point_in_time(self: Any, params: Any = None, headers: Any = None) -> Any: """ Returns the list of active point in times searches @@ -35,7 +36,9 @@ def list_all_point_in_time(self, params=None, headers=None): @query_params( "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" ) -def create_point_in_time(self, index, params=None, headers=None): +def create_point_in_time( + self: Any, index: Any, params: Any = None, headers: Any = None +) -> Any: """ Create a point in time that can be used in subsequent searches @@ -68,7 +71,13 @@ def create_point_in_time(self, index, params=None, headers=None): @query_params() -def delete_point_in_time(self, body=None, all=False, params=None, headers=None): +def delete_point_in_time( + self: Any, + body: Any = None, + all: bool = False, + params: Any = None, + headers: Any = None, +) -> Any: """ Delete a point in time @@ -94,7 +103,7 @@ def delete_point_in_time(self, body=None, all=False, params=None, headers=None): @query_params() -def health_check(self, params=None, headers=None): +def health_check(self: Any, params: Any = None, headers: Any = None) -> Any: """ Checks to see if the Security plugin is up and running. @@ -113,7 +122,9 @@ def health_check(self, params=None, headers=None): @query_params() -def update_audit_config(self, body, params=None, headers=None): +def update_audit_config( + self: Any, body: Any, params: Any = None, headers: Any = None +) -> Any: """ A PUT call updates the audit configuration. diff --git a/opensearchpy/client/_patch.pyi b/opensearchpy/client/_patch.pyi deleted file mode 100644 index b1819682..00000000 --- a/opensearchpy/client/_patch.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. 
-# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -def list_all_point_in_time( - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -def create_point_in_time( - *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -def delete_point_in_time( - *, - body: Optional[Any] = ..., - all: Optional[bool] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., -) -> Any: ... -def health_check( - params: Union[Any, None] = ..., headers: Union[Any, None] = ... -) -> Union[bool, Any]: ... -def update_audit_config( - body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... -) -> Union[bool, Any]: ... diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 49d797ca..91adbf35 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -36,12 +36,19 @@ # ----------------------------------------------------- +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") - def aliases(self, name=None, params=None, headers=None): + def aliases( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Shows information about currently configured aliases to indices including filter and routing infos. @@ -65,6 +72,20 @@ def aliases(self, name=None, params=None, headers=None): "GET", _make_path("_cat", "aliases", name), params=params, headers=headers ) + @query_params() + def all_pit_segments( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Lists all active point-in-time segments. 
+ + """ + return self.transport.perform_request( + "GET", "/_cat/pit_segments/_all", params=params, headers=headers + ) + @query_params( "bytes", "cluster_manager_timeout", @@ -76,7 +97,12 @@ def aliases(self, name=None, params=None, headers=None): "s", "v", ) - def allocation(self, node_id=None, params=None, headers=None): + def allocation( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using. @@ -108,8 +134,51 @@ def allocation(self, node_id=None, params=None, headers=None): headers=headers, ) + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) + def cluster_manager( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about the cluster-manager node. + + + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return self.transport.perform_request( + "GET", "/_cat/cluster_manager", params=params, headers=headers + ) + @query_params("format", "h", "help", "s", "v") - def count(self, index=None, params=None, headers=None): + def count( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides quick access to the document count of the entire cluster, or individual indices. @@ -129,8 +198,43 @@ def count(self, index=None, params=None, headers=None): "GET", _make_path("_cat", "count", index), params=params, headers=headers ) + @query_params("bytes", "format", "h", "help", "s", "v") + def fielddata( + self, + fields: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Shows how much heap memory is currently being used by fielddata on every data + node in the cluster. + + + :arg fields: Comma-separated list of fields to return in the + output. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return self.transport.perform_request( + "GET", + _make_path("_cat", "fielddata", fields), + params=params, + headers=headers, + ) + @query_params("format", "h", "help", "s", "time", "ts", "v") - def health(self, params=None, headers=None): + def health( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a concise representation of the cluster health. 
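Every handler above ends in the same `transport.perform_request` call, with the `@query_params` whitelist turning keyword arguments into query-string parameters. A short sketch under the same assumptions as before (local cluster, `movies` index):

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

# format/v/bytes are whitelisted query parameters, not positional arguments.
print(client.cat.health(format="json", v=True))
print(client.cat.count(index="movies", format="json"))
print(client.cat.allocation(bytes="mb", format="json", v=True))
print(client.cat.fielddata(fields="title", bytes="kb", format="json"))
```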
@@ -151,7 +255,11 @@ def health(self, params=None, headers=None): ) @query_params("help", "s") - def help(self, params=None, headers=None): + def help( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns help for the Cat APIs. @@ -180,7 +288,12 @@ def help(self, params=None, headers=None): "time", "v", ) - def indices(self, index=None, params=None, headers=None): + def indices( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about indices: number of primaries and replicas, document counts, disk size, ... @@ -232,7 +345,11 @@ def indices(self, index=None, params=None, headers=None): "s", "v", ) - def master(self, params=None, headers=None): + def master( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the cluster-manager node. @@ -271,9 +388,13 @@ def master(self, params=None, headers=None): "s", "v", ) - def cluster_manager(self, params=None, headers=None): + def nodeattrs( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about the cluster-manager node. + Returns information about custom node attributes. :arg cluster_manager_timeout: Operation timeout for connection @@ -292,7 +413,7 @@ def cluster_manager(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/cluster_manager", params=params, headers=headers + "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( @@ -308,7 +429,11 @@ def cluster_manager(self, params=None, headers=None): "time", "v", ) - def nodes(self, params=None, headers=None): + def nodes( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic statistics about performance of cluster nodes. @@ -340,37 +465,6 @@ def nodes(self, params=None, headers=None): ) @query_params( - "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" - ) - def recovery(self, index=None, params=None, headers=None): - """ - Returns information about index shard recoveries, both on-going completed. - - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information. - :arg active_only: If `true`, the response only includes ongoing - shard recoveries. Default is false. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. - :arg detailed: If `true`, the response includes detailed - information about shard recoveries. Default is false. - :arg format: A short version of the Accept header, e.g. json, - yaml. - :arg h: Comma-separated list of column names to display. - :arg help: Return help information. Default is false. - :arg s: Comma-separated list of column names or column aliases - to sort by. - :arg time: The unit in which to display time values. Valid - choices are d, h, m, s, ms, micros, nanos. - :arg v: Verbose mode. Display column headers. Default is false. 
- """ - return self.transport.perform_request( - "GET", _make_path("_cat", "recovery", index), params=params, headers=headers - ) - - @query_params( - "bytes", "cluster_manager_timeout", "format", "h", @@ -381,15 +475,15 @@ def recovery(self, index=None, params=None, headers=None): "time", "v", ) - def shards(self, index=None, params=None, headers=None): + def pending_tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Provides a detailed view of shard allocation on nodes. + Returns a concise representation of the cluster pending tasks. - :arg index: Comma-separated list of indices to limit the - returned information. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -408,34 +502,52 @@ def shards(self, index=None, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", _make_path("_cat", "shards", index), params=params, headers=headers + "GET", "/_cat/pending_tasks", params=params, headers=headers + ) + + @query_params() + def pit_segments( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + List segments for one or several PITs. + + + """ + return self.transport.perform_request( + "GET", "/_cat/pit_segments", params=params, headers=headers, body=body ) @query_params( - "bytes", "cluster_manager_timeout", "format", "h", "help", + "local", "master_timeout", "s", "v", ) - def segments(self, index=None, params=None, headers=None): + def plugins( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Provides low-level information about the segments in the shards of an index. + Returns information about installed plugins across nodes node. - :arg index: Comma-separated list of indices to limit the - returned information. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, use 'cluster_manager_timeout' instead.): Operation timeout for connection to master node. @@ -444,36 +556,34 @@ def segments(self, index=None, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", _make_path("_cat", "segments", index), params=params, headers=headers + "GET", "/_cat/plugins", params=params, headers=headers ) @query_params( - "cluster_manager_timeout", - "format", - "h", - "help", - "local", - "master_timeout", - "s", - "time", - "v", + "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" ) - def pending_tasks(self, params=None, headers=None): + def recovery( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns a concise representation of the cluster pending tasks. + Returns information about index shard recoveries, both on-going completed. 
- :arg cluster_manager_timeout: Operation timeout for connection - to cluster-manager node. + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + shard recoveries. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg detailed: If `true`, the response includes detailed + information about shard recoveries. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. - :arg local: Return local information, do not retrieve the state - from cluster-manager node. Default is false. - :arg master_timeout (Deprecated: To promote inclusive language, - use 'cluster_manager_timeout' instead.): Operation timeout for - connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. :arg time: The unit in which to display time values. Valid @@ -481,7 +591,7 @@ def pending_tasks(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/pending_tasks", params=params, headers=headers + "GET", _make_path("_cat", "recovery", index), params=params, headers=headers ) @query_params( @@ -492,17 +602,17 @@ def pending_tasks(self, params=None, headers=None): "local", "master_timeout", "s", - "size", "v", ) - def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): + def repositories( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns cluster-wide thread pool statistics per node. By default the active, - queue and rejected statistics are returned for all thread pools. + Returns information about snapshot repositories registered in the cluster. - :arg thread_pool_patterns: Comma-separated list of regular- - expressions to filter the thread pools in the output. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -516,65 +626,94 @@ def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. - :arg size: The multiplier in which to display values. :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", - _make_path("_cat", "thread_pool", thread_pool_patterns), - params=params, - headers=headers, + "GET", "/_cat/repositories", params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") - def fielddata(self, fields=None, params=None, headers=None): + @query_params( + "active_only", + "bytes", + "completed_only", + "detailed", + "format", + "h", + "help", + "s", + "shards", + "time", + "v", + ) + def segment_replication( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Shows how much heap memory is currently being used by fielddata on every data - node in the cluster. + Returns information about both on-going and latest completed Segment + Replication events. - :arg fields: Comma-separated list of fields to return in the - output. + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. 
+ :arg active_only: If `true`, the response only includes ongoing + segment replication events. Default is false. :arg bytes: The unit in which to display byte values. Valid choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg completed_only: If `true`, the response only includes + latest completed segment replication events. Default is false. + :arg detailed: If `true`, the response includes detailed + information about segment replications. Default is false. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg shards: Comma-separated list of shards to display. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", - _make_path("_cat", "fielddata", fields), + _make_path("_cat", "segment_replication", index), params=params, headers=headers, ) @query_params( + "bytes", "cluster_manager_timeout", "format", "h", "help", - "local", "master_timeout", "s", "v", ) - def plugins(self, params=None, headers=None): + def segments( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about installed plugins across nodes node. + Provides low-level information about the segments in the shards of an index. + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, yaml. :arg h: Comma-separated list of column names to display. :arg help: Return help information. Default is false. - :arg local: Return local information, do not retrieve the state - from cluster-manager node. Default is false. :arg master_timeout (Deprecated: To promote inclusive language, use 'cluster_manager_timeout' instead.): Operation timeout for connection to master node. @@ -583,10 +722,11 @@ def plugins(self, params=None, headers=None): :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/plugins", params=params, headers=headers + "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( + "bytes", "cluster_manager_timeout", "format", "h", @@ -594,13 +734,23 @@ def plugins(self, params=None, headers=None): "local", "master_timeout", "s", + "time", "v", ) - def nodeattrs(self, params=None, headers=None): + def shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about custom node attributes. + Provides a detailed view of shard allocation on nodes. + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -614,10 +764,12 @@ def nodeattrs(self, params=None, headers=None): connection to master node. 
:arg s: Comma-separated list of column names or column aliases to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/nodeattrs", params=params, headers=headers + "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) @query_params( @@ -628,13 +780,22 @@ def nodeattrs(self, params=None, headers=None): "local", "master_timeout", "s", + "size", "v", ) - def repositories(self, params=None, headers=None): + def thread_pool( + self, + thread_pool_patterns: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about snapshot repositories registered in the cluster. + Returns cluster-wide thread pool statistics per node. By default the active, + queue and rejected statistics are returned for all thread pools. + :arg thread_pool_patterns: Comma-separated list of regular- + expressions to filter the thread pools in the output. :arg cluster_manager_timeout: Operation timeout for connection to cluster-manager node. :arg format: A short version of the Accept header, e.g. json, @@ -648,10 +809,14 @@ def repositories(self, params=None, headers=None): connection to master node. :arg s: Comma-separated list of column names or column aliases to sort by. + :arg size: The multiplier in which to display values. :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/repositories", params=params, headers=headers + "GET", + _make_path("_cat", "thread_pool", thread_pool_patterns), + params=params, + headers=headers, ) @query_params( @@ -665,7 +830,12 @@ def repositories(self, params=None, headers=None): "time", "v", ) - def snapshots(self, repository=None, params=None, headers=None): + def snapshots( + self, + repository: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all snapshots in a specific repository. @@ -708,7 +878,11 @@ def snapshots(self, repository=None, params=None, headers=None): "time", "v", ) - def tasks(self, params=None, headers=None): + def tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the tasks currently executing on one or more nodes in the cluster. @@ -748,7 +922,12 @@ def tasks(self, params=None, headers=None): "s", "v", ) - def templates(self, name=None, params=None, headers=None): + def templates( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about existing templates. @@ -772,71 +951,3 @@ def templates(self, name=None, params=None, headers=None): return self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) - - @query_params() - def all_pit_segments(self, params=None, headers=None): - """ - Lists all active point-in-time segments. - - """ - return self.transport.perform_request( - "GET", "/_cat/pit_segments/_all", params=params, headers=headers - ) - - @query_params() - def pit_segments(self, body=None, params=None, headers=None): - """ - List segments for one or several PITs. 
- - - """ - return self.transport.perform_request( - "GET", "/_cat/pit_segments", params=params, headers=headers, body=body - ) - - @query_params( - "active_only", - "bytes", - "completed_only", - "detailed", - "format", - "h", - "help", - "s", - "shards", - "time", - "v", - ) - def segment_replication(self, index=None, params=None, headers=None): - """ - Returns information about both on-going and latest completed Segment - Replication events. - - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information. - :arg active_only: If `true`, the response only includes ongoing - segment replication events. Default is false. - :arg bytes: The unit in which to display byte values. Valid - choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. - :arg completed_only: If `true`, the response only includes - latest completed segment replication events. Default is false. - :arg detailed: If `true`, the response includes detailed - information about segment replications. Default is false. - :arg format: A short version of the Accept header, e.g. json, - yaml. - :arg h: Comma-separated list of column names to display. - :arg help: Return help information. Default is false. - :arg s: Comma-separated list of column names or column aliases - to sort by. - :arg shards: Comma-separated list of shards to display. - :arg time: The unit in which to display time values. Valid - choices are d, h, m, s, ms, micros, nanos. - :arg v: Verbose mode. Display column headers. Default is false. - """ - return self.transport.perform_request( - "GET", - _make_path("_cat", "segment_replication", index), - params=params, - headers=headers, - ) diff --git a/opensearchpy/client/cat.pyi b/opensearchpy/client/cat.pyi deleted file mode 100644 index 0d690dda..00000000 --- a/opensearchpy/client/cat.pyi +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class CatClient(NamespacedClient): - def aliases( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def allocation( - self, - *, - node_id: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def count( - self, - *, - index: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
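With the stub file gone, type checkers resolve `CatClient` calls against the inline annotations in `cat.py` itself. A hedged sketch of what that looks like at a call site (`movies*` is an assumed alias pattern):

```python
from typing import Any

from opensearchpy import OpenSearch


def alias_rows(client: OpenSearch) -> Any:
    # mypy checks this against the signature in cat.py directly;
    # there is no longer a separate .pyi stub that can drift out of sync.
    return client.cat.aliases(name="movies*", format="json")
```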
- def health( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - ts: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def help( - self, - *, - help: Optional[Any] = ..., - s: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def indices( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - health: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pri: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def master( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
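The `h` and `s` parameters documented in `cat.py` above select and sort columns, and with `format="json"` each row comes back as a dict keyed by column name. A small sketch, again assuming a local cluster and a `movies` index:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

rows = client.cat.indices(
    index="movies*",
    h="index,health,docs.count,store.size",  # choose the columns
    s="index",                               # sort by column name or alias
    format="json",
)
for row in rows:
    print(row["index"], row["docs.count"], row["store.size"])
```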
- def cluster_manager( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def nodes( - self, - *, - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - full_id: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shards( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
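`nodes` and `recovery` above pair well when watching a cluster rebalance: one reports per-node load, the other any in-flight shard copies. A sketch with the same assumed local `client`:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

print(client.cat.nodes(h="name,heap.percent,cpu,load_1m", format="json"))
print(client.cat.recovery(index="movies", active_only=True, format="json"))
```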
- def segments( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def thread_pool( - self, - *, - thread_pool_patterns: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - size: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def fielddata( - self, - *, - fields: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
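A sketch of the queue-monitoring handlers above; `thread_pool_patterns` filters pools by name, and `time` picks the display unit for `pending_tasks` (same assumed local `client`):

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

print(client.cat.thread_pool(thread_pool_patterns="search,write", format="json", v=True))
print(client.cat.pending_tasks(time="ms", format="json"))
```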
- def plugins( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def nodeattrs( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def repositories( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def snapshots( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
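For the snapshot-related handlers above, `repository` narrows the listing to one registered repository; `my_repository` is an assumed name here:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

print(client.cat.repositories(format="json"))
print(client.cat.snapshots(repository="my_repository", time="s", format="json"))
```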
- def tasks( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def templates( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def all_pit_segments( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def pit_segments( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
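The two PIT handlers differ only in scope: `all_pit_segments` hits `/_cat/pit_segments/_all` with no body, while `pit_segments` takes the PIT ids in the request body. The `{"pit_id": [...]}` body shape below follows the Point in Time API and should be treated as an assumption:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

pit = client.create_point_in_time(index="movies", keep_alive="1m")

# Segments for one specific PIT, then for every active PIT on the cluster.
print(client.cat.pit_segments(body={"pit_id": [pit["pit_id"]]}))
print(client.cat.all_pit_segments())
```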
- def segment_replication( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - completed_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - shards: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/client.py b/opensearchpy/client/client.py new file mode 100644 index 00000000..7f0b67c6 --- /dev/null +++ b/opensearchpy/client/client.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Optional, Type + +from opensearchpy.client.utils import _normalize_hosts +from opensearchpy.transport import Transport + + +class Client(object): + """ + A generic async OpenSearch client. + """ + + def __init__( + self, + hosts: Optional[str] = None, + transport_class: Type[Transport] = Transport, + **kwargs: Any + ) -> None: + """ + :arg hosts: list of nodes, or a single node, we should connect to. + Node should be a dictionary ({"host": "localhost", "port": 9200}), + the entire dictionary will be passed to the :class:`~opensearchpy.Connection` + class as kwargs, or a string in the format of ``host[:port]`` which will be + translated to a dictionary automatically. If no value is given the + :class:`~opensearchpy.Connection` class defaults will be used. + + :arg transport_class: :class:`~opensearchpy.Transport` subclass to use. + + :arg kwargs: any additional arguments will be passed on to the + :class:`~opensearchpy.Transport` class and, subsequently, to the + :class:`~opensearchpy.Connection` instances. + """ + self.transport = transport_class(_normalize_hosts(hosts), **kwargs) diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index 248c7ce3..f2770f2d 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -55,7 +57,12 @@ class ClusterClient(NamespacedClient): "wait_for_nodes", "wait_for_status", ) - def health(self, index=None, params=None, headers=None): + def health( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the health of the cluster. 
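The new `client.py` above deliberately exposes nothing beyond a `Transport`, so a `Client` serves either as a base class or for raw requests; note that despite its docstring, the default `Transport` is the synchronous one. A minimal sketch (host and credentials are assumptions):

```python
from opensearchpy.client.client import Client

generic = Client(
    hosts="https://admin:admin@localhost:9200",
    use_ssl=True,        # forwarded through Transport to the Connection
    verify_certs=False,
)

# No namespaced helpers on the generic client -- go through the transport.
info = generic.transport.perform_request("GET", "/")
print(info["version"]["number"])
```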
@@ -99,7 +106,11 @@ def health(self, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def pending_tasks(self, params=None, headers=None): + def pending_tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. @@ -128,7 +139,13 @@ def pending_tasks(self, params=None, headers=None): "wait_for_metadata_version", "wait_for_timeout", ) - def state(self, metric=None, index=None, params=None, headers=None): + def state( + self, + metric: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a comprehensive information about the state of the cluster. @@ -171,7 +188,12 @@ def state(self, metric=None, index=None, params=None, headers=None): ) @query_params("flat_settings", "timeout") - def stats(self, node_id=None, params=None, headers=None): + def stats( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns high-level overview of cluster statistics. @@ -202,7 +224,12 @@ def stats(self, node_id=None, params=None, headers=None): "retry_failed", "timeout", ) - def reroute(self, body=None, params=None, headers=None): + def reroute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to manually change the allocation of individual shards in the cluster. @@ -235,7 +262,11 @@ def reroute(self, body=None, params=None, headers=None): "master_timeout", "timeout", ) - def get_settings(self, params=None, headers=None): + def get_settings( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns cluster settings. @@ -258,7 +289,12 @@ def get_settings(self, params=None, headers=None): @query_params( "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout" ) - def put_settings(self, body, params=None, headers=None): + def put_settings( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the cluster settings. @@ -282,7 +318,11 @@ def put_settings(self, body, params=None, headers=None): ) @query_params() - def remote_info(self, params=None, headers=None): + def remote_info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about configured remote clusters. @@ -292,7 +332,12 @@ def remote_info(self, params=None, headers=None): ) @query_params("include_disk_info", "include_yes_decisions") - def allocation_explain(self, body=None, params=None, headers=None): + def allocation_explain( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides explanations for shard allocations in the cluster. @@ -313,7 +358,12 @@ def allocation_explain(self, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_component_template(self, name, params=None, headers=None): + def delete_component_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a component template. 
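A short sketch tying together the cluster handlers above: wait for health, flip a routing setting, and ask why a shard sits where it does. The allocation setting and the shard coordinates are assumptions:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

print(client.cluster.health(wait_for_status="yellow", request_timeout=30))

client.cluster.put_settings(
    body={"transient": {"cluster.routing.allocation.enable": "all"}}
)
print(client.cluster.get_settings(flat_settings=True))

# Explain the current placement of primary shard 0 of the movies index.
print(client.cluster.allocation_explain(
    body={"index": "movies", "shard": 0, "primary": True}
))
```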
@@ -337,7 +387,12 @@ def delete_component_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def get_component_template(self, name=None, params=None, headers=None): + def get_component_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns one or more component templates. @@ -359,7 +414,13 @@ def get_component_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout") - def put_component_template(self, name, body, params=None, headers=None): + def put_component_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a component template. @@ -388,7 +449,12 @@ def put_component_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def exists_component_template(self, name, params=None, headers=None): + def exists_component_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular component template exist. @@ -413,7 +479,11 @@ def exists_component_template(self, name, params=None, headers=None): ) @query_params("wait_for_removal") - def delete_voting_config_exclusions(self, params=None, headers=None): + def delete_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears cluster voting config exclusions. @@ -430,7 +500,11 @@ def delete_voting_config_exclusions(self, params=None, headers=None): ) @query_params("node_ids", "node_names", "timeout") - def post_voting_config_exclusions(self, params=None, headers=None): + def post_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. @@ -448,7 +522,11 @@ def post_voting_config_exclusions(self, params=None, headers=None): ) @query_params() - def delete_decommission_awareness(self, params=None, headers=None): + def delete_decommission_awareness( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete any existing decommission. @@ -461,7 +539,11 @@ def delete_decommission_awareness(self, params=None, headers=None): ) @query_params() - def delete_weighted_routing(self, params=None, headers=None): + def delete_weighted_routing( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete weighted shard routing weights. @@ -475,8 +557,11 @@ def delete_weighted_routing(self, params=None, headers=None): @query_params() def get_decommission_awareness( - self, awareness_attribute_name, params=None, headers=None - ): + self, + awareness_attribute_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Get details and status of decommissioned attribute. @@ -502,7 +587,12 @@ def get_decommission_awareness( ) @query_params() - def get_weighted_routing(self, attribute, params=None, headers=None): + def get_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Fetches weighted shard routing weights. 
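The component-template handlers above form a natural lifecycle; a sketch of the full round trip, with `movies_settings` as an assumed template name:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

client.cluster.put_component_template(
    name="movies_settings",
    body={"template": {"settings": {"index.number_of_replicas": 1}}},
)

# HEAD-backed exists calls come back as a plain bool.
assert client.cluster.exists_component_template(name="movies_settings")

print(client.cluster.get_component_template(name="movies_settings"))
client.cluster.delete_component_template(name="movies_settings")
```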
@@ -522,11 +612,11 @@ def get_weighted_routing(self, attribute, params=None, headers=None): @query_params() def put_decommission_awareness( self, - awareness_attribute_name, - awareness_attribute_value, - params=None, - headers=None, - ): + awareness_attribute_name: Any, + awareness_attribute_value: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Decommissions an awareness attribute. @@ -552,7 +642,12 @@ def put_decommission_awareness( ) @query_params() - def put_weighted_routing(self, attribute, params=None, headers=None): + def put_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates weighted shard routing weights. diff --git a/opensearchpy/client/cluster.pyi b/opensearchpy/client/cluster.pyi deleted file mode 100644 index ad2d3fac..00000000 --- a/opensearchpy/client/cluster.pyi +++ /dev/null @@ -1,456 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
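A sketch of the decommission round trip defined above; it only succeeds on a cluster with zone awareness configured, and `zone`/`zone-b` are assumed attribute values:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

client.cluster.put_decommission_awareness(
    awareness_attribute_name="zone",
    awareness_attribute_value="zone-b",
)
print(client.cluster.get_decommission_awareness(awareness_attribute_name="zone"))
client.cluster.delete_decommission_awareness()
```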
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class ClusterClient(NamespacedClient): - def health( - self, - *, - index: Optional[Any] = ..., - awareness_attribute: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - level: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_events: Optional[Any] = ..., - wait_for_no_initializing_shards: Optional[Any] = ..., - wait_for_no_relocating_shards: Optional[Any] = ..., - wait_for_nodes: Optional[Any] = ..., - wait_for_status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def pending_tasks( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def state( - self, - *, - metric: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_metadata_version: Optional[Any] = ..., - wait_for_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def stats( - self, - *, - node_id: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reroute( - self, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - explain: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - metric: Optional[Any] = ..., - retry_failed: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_settings( - self, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_settings( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def remote_info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def allocation_explain( - self, - *, - body: Optional[Any] = ..., - include_disk_info: Optional[Any] = ..., - include_yes_decisions: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_component_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_component_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_component_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def delete_voting_config_exclusions( - self, - *, - wait_for_removal: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def post_voting_config_exclusions( - self, - *, - node_ids: Optional[Any] = ..., - node_names: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_decommission_awareness( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_weighted_routing( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_decommission_awareness( - self, - awareness_attribute_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_decommission_awareness( - self, - awareness_attribute_name: Any, - awareness_attribute_value: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_weighted_routing( - self, - attribute: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
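
The changes above follow one pattern throughout: each method that previously carried its signature in a parallel `cluster.pyi` stub now declares `Any`-typed parameters and an `Any` return inline, and the stub file is deleted. The sketch below illustrates that pattern in isolation; it is not the library's implementation: the real `@query_params` decorator lives in `opensearchpy/client/utils.py` and handles query-string plumbing, so a no-op stand-in is used here to keep the example self-contained and runnable.

```python
from typing import Any, Callable


def query_params(*accepted: str) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    # No-op stand-in for opensearchpy's real decorator, which folds the
    # accepted keyword arguments into the request's query string.
    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        return func

    return decorator


class ClusterClient:
    # Before this patch: `def put_weighted_routing(self, attribute,
    # params=None, headers=None)` in cluster.py, mirrored by a stub in
    # cluster.pyi. After: one annotated definition that type checkers
    # read directly from the module.
    @query_params()
    def put_weighted_routing(
        self,
        attribute: Any,
        params: Any = None,
        headers: Any = None,
    ) -> Any:
        """Updates weighted shard routing weights."""
        return {"attribute": attribute, "params": params, "headers": headers}


# Illustrative call; "zone" is just a plausible awareness attribute name.
print(ClusterClient().put_weighted_routing("zone"))
```
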
diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index adc4aea3..8617708e 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -36,6 +36,8 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -43,7 +45,12 @@ class DanglingIndicesClient(NamespacedClient): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - def delete_dangling_index(self, index_uuid, params=None, headers=None): + def delete_dangling_index( + self, + index_uuid: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes the specified dangling index. @@ -71,7 +78,12 @@ def delete_dangling_index(self, index_uuid, params=None, headers=None): @query_params( "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - def import_dangling_index(self, index_uuid, params=None, headers=None): + def import_dangling_index( + self, + index_uuid: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Imports the specified dangling index. @@ -94,7 +106,11 @@ def import_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params() - def list_dangling_indices(self, params=None, headers=None): + def list_dangling_indices( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all dangling indices. diff --git a/opensearchpy/client/dangling_indices.pyi b/opensearchpy/client/dangling_indices.pyi deleted file mode 100644 index b48ba830..00000000 --- a/opensearchpy/client/dangling_indices.pyi +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class DanglingIndicesClient(NamespacedClient): - def delete_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def import_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def list_dangling_indices( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/features.py b/opensearchpy/client/features.py index b96ea308..c6520fa1 100644 --- a/opensearchpy/client/features.py +++ b/opensearchpy/client/features.py @@ -26,12 +26,14 @@ # under the License. 
+from typing import Any + from .utils import NamespacedClient, query_params class FeaturesClient(NamespacedClient): @query_params("master_timeout", "cluster_manager_timeout") - def get_features(self, params=None, headers=None): + def get_features(self, params: Any = None, headers: Any = None) -> Any: """ Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot @@ -47,7 +49,7 @@ def get_features(self, params=None, headers=None): ) @query_params() - def reset_features(self, params=None, headers=None): + def reset_features(self, params: Any = None, headers: Any = None) -> Any: """ Resets the internal state of features, usually by deleting system indices diff --git a/opensearchpy/client/features.pyi b/opensearchpy/client/features.pyi deleted file mode 100644 index 6abcd79e..00000000 --- a/opensearchpy/client/features.pyi +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class FeaturesClient(NamespacedClient): - def get_features( - self, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reset_features( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
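
With the stubs removed, type checkers read annotations straight from the `.py` modules. The snippet below is a hedged usage sketch, not part of the patch: it assumes a local cluster at `https://admin:admin@localhost:9200`, matching the connection settings used in the project's guides, and exercises methods annotated in this commit.

```python
from typing import Any

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=['https://admin:admin@localhost:9200'],
    use_ssl=True,
    verify_certs=False
)

# Both methods are annotated `-> Any` by this patch, so these assignments
# type-check under mypy without features.pyi or dangling_indices.pyi.
features: Any = client.features.get_features()
dangling: Any = client.dangling_indices.list_dangling_indices()
print(features)
print(dangling)
```

Running `mypy` over code like this is the practical check that the inline annotations carry the same information the deleted stubs did.
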
diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index 3f8df6c6..7cdc7e57 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -36,12 +36,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndicesClient(NamespacedClient): @query_params() - def analyze(self, body=None, index=None, params=None, headers=None): + def analyze( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the analysis process on a text and return the tokens breakdown of the text. @@ -60,7 +68,12 @@ def analyze(self, body=None, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def refresh(self, index=None, params=None, headers=None): + def refresh( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the refresh operation in one or more indices. @@ -87,7 +100,12 @@ def refresh(self, index=None, params=None, headers=None): "ignore_unavailable", "wait_if_ongoing", ) - def flush(self, index=None, params=None, headers=None): + def flush( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the flush operation on one or more indices. @@ -119,7 +137,13 @@ def flush(self, index=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - def create(self, index, body=None, params=None, headers=None): + def create( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates an index with optional settings and mappings. @@ -146,7 +170,14 @@ def create(self, index, body=None, params=None, headers=None): @query_params( "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - def clone(self, index, target, body=None, params=None, headers=None): + def clone( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clones an index. @@ -186,7 +217,12 @@ def clone(self, index, target, body=None, params=None, headers=None): "local", "master_timeout", ) - def get(self, index, params=None, headers=None): + def get( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about one or more indices. @@ -229,7 +265,12 @@ def get(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def open(self, index, params=None, headers=None): + def open( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Opens an index. @@ -268,7 +309,12 @@ def open(self, index, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def close(self, index, params=None, headers=None): + def close( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Closes an index. @@ -306,7 +352,12 @@ def close(self, index, params=None, headers=None): "master_timeout", "timeout", ) - def delete(self, index, params=None, headers=None): + def delete( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index. 
@@ -344,7 +395,12 @@ def delete(self, index, params=None, headers=None): "include_defaults", "local", ) - def exists(self, index, params=None, headers=None): + def exists( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index exists. @@ -382,7 +438,13 @@ def exists(self, index, params=None, headers=None): "timeout", "write_index_only", ) - def put_mapping(self, body, index=None, params=None, headers=None): + def put_mapping( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index mappings. @@ -429,7 +491,12 @@ def put_mapping(self, body, index=None, params=None, headers=None): "local", "master_timeout", ) - def get_mapping(self, index=None, params=None, headers=None): + def get_mapping( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mappings for one or more indices. @@ -463,7 +530,13 @@ def get_mapping(self, index=None, params=None, headers=None): "include_defaults", "local", ) - def get_field_mapping(self, fields, index=None, params=None, headers=None): + def get_field_mapping( + self, + fields: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mapping for one or more fields. @@ -494,7 +567,14 @@ def get_field_mapping(self, fields, index=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def put_alias(self, index, name, body=None, params=None, headers=None): + def put_alias( + self, + index: Any, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an alias. @@ -524,7 +604,13 @@ def put_alias(self, index, name, body=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - def exists_alias(self, name, index=None, params=None, headers=None): + def exists_alias( + self, + name: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular alias exists. @@ -550,7 +636,13 @@ def exists_alias(self, name, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - def get_alias(self, index=None, name=None, params=None, headers=None): + def get_alias( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an alias. @@ -573,7 +665,12 @@ def get_alias(self, index=None, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def update_aliases(self, body, params=None, headers=None): + def update_aliases( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates index aliases. @@ -594,7 +691,13 @@ def update_aliases(self, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_alias(self, index, name, params=None, headers=None): + def delete_alias( + self, + index: Any, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an alias. 
@@ -619,7 +722,13 @@ def delete_alias(self, index, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "create", "master_timeout", "order") - def put_template(self, name, body, params=None, headers=None): + def put_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. @@ -650,7 +759,12 @@ def put_template(self, name, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def exists_template(self, name, params=None, headers=None): + def exists_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. @@ -674,7 +788,12 @@ def exists_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def get_template(self, name=None, params=None, headers=None): + def get_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. @@ -695,7 +814,12 @@ def get_template(self, name=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_template(self, name, params=None, headers=None): + def delete_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. @@ -725,7 +849,13 @@ def delete_template(self, name, params=None, headers=None): "local", "master_timeout", ) - def get_settings(self, index=None, name=None, params=None, headers=None): + def get_settings( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns settings for one or more indices. @@ -767,7 +897,13 @@ def get_settings(self, index=None, name=None, params=None, headers=None): "preserve_existing", "timeout", ) - def put_settings(self, body, index=None, params=None, headers=None): + def put_settings( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index settings. @@ -817,7 +953,13 @@ def put_settings(self, body, index=None, params=None, headers=None): "include_unloaded_segments", "level", ) - def stats(self, index=None, metric=None, params=None, headers=None): + def stats( + self, + index: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in an index. @@ -858,7 +1000,12 @@ def stats(self, index=None, metric=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose" ) - def segments(self, index=None, params=None, headers=None): + def segments( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides low-level information about segments in a Lucene index. @@ -894,7 +1041,13 @@ def segments(self, index=None, params=None, headers=None): "q", "rewrite", ) - def validate_query(self, body=None, index=None, params=None, headers=None): + def validate_query( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. 
@@ -943,7 +1096,12 @@ def validate_query(self, body=None, index=None, params=None, headers=None): "query", "request", ) - def clear_cache(self, index=None, params=None, headers=None): + def clear_cache( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears all or specific caches for one or more indices. @@ -969,7 +1127,12 @@ def clear_cache(self, index=None, params=None, headers=None): ) @query_params("active_only", "detailed") - def recovery(self, index=None, params=None, headers=None): + def recovery( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about ongoing index shard recoveries. @@ -992,7 +1155,12 @@ def recovery(self, index=None, params=None, headers=None): "only_ancient_segments", "wait_for_completion", ) - def upgrade(self, index=None, params=None, headers=None): + def upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1017,7 +1185,12 @@ def upgrade(self, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def get_upgrade(self, index=None, params=None, headers=None): + def get_upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ The _upgrade API is no longer useful and will be removed. @@ -1040,7 +1213,12 @@ def get_upgrade(self, index=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status" ) - def shard_stores(self, index=None, params=None, headers=None): + def shard_stores( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides store information for shard copies of indices. @@ -1070,7 +1248,12 @@ def shard_stores(self, index=None, params=None, headers=None): "max_num_segments", "only_expunge_deletes", ) - def forcemerge(self, index=None, params=None, headers=None): + def forcemerge( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the force merge operation on one or more indices. @@ -1103,7 +1286,14 @@ def forcemerge(self, index=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def shrink(self, index, target, body=None, params=None, headers=None): + def shrink( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allow to shrink an existing index into a new index with fewer primary shards. @@ -1142,7 +1332,14 @@ def shrink(self, index, target, body=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def split(self, index, target, body=None, params=None, headers=None): + def split( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows you to split an existing index into a new index with more primary shards. @@ -1182,7 +1379,14 @@ def split(self, index, target, body=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def rollover(self, alias, body=None, new_index=None, params=None, headers=None): + def rollover( + self, + alias: Any, + body: Any = None, + new_index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. 
@@ -1217,7 +1421,13 @@ def rollover(self, alias, body=None, new_index=None, params=None, headers=None): ) @query_params() - def create_data_stream(self, name, body=None, params=None, headers=None): + def create_data_stream( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a data stream. @@ -1237,7 +1447,12 @@ def create_data_stream(self, name, body=None, params=None, headers=None): ) @query_params() - def delete_data_stream(self, name, params=None, headers=None): + def delete_data_stream( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a data stream. @@ -1253,7 +1468,12 @@ def delete_data_stream(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_index_template(self, name, params=None, headers=None): + def delete_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. @@ -1277,7 +1497,12 @@ def delete_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def exists_index_template(self, name, params=None, headers=None): + def exists_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. @@ -1301,7 +1526,12 @@ def exists_index_template(self, name, params=None, headers=None): ) @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") - def get_index_template(self, name=None, params=None, headers=None): + def get_index_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. @@ -1322,7 +1552,13 @@ def get_index_template(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - def put_index_template(self, name, body, params=None, headers=None): + def put_index_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. @@ -1352,7 +1588,13 @@ def put_index_template(self, name, body, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - def simulate_index_template(self, name, body=None, params=None, headers=None): + def simulate_index_template( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Simulate matching the given index name against the index templates in the system. @@ -1385,7 +1627,12 @@ def simulate_index_template(self, name, body=None, params=None, headers=None): ) @query_params() - def get_data_stream(self, name=None, params=None, headers=None): + def get_data_stream( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns data streams. @@ -1398,7 +1645,13 @@ def get_data_stream(self, name=None, params=None, headers=None): ) @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") - def simulate_template(self, body=None, name=None, params=None, headers=None): + def simulate_template( + self, + body: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Simulate resolving the given template name or body. 
@@ -1426,7 +1679,12 @@ def simulate_template(self, body=None, name=None, params=None, headers=None): ) @query_params("expand_wildcards") - def resolve_index(self, name, params=None, headers=None): + def resolve_index( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about any matching indices, aliases, and data streams. @@ -1452,7 +1710,13 @@ def resolve_index(self, name, params=None, headers=None): "master_timeout", "timeout", ) - def add_block(self, index, block, params=None, headers=None): + def add_block( + self, + index: Any, + block: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds a block to an index. @@ -1484,7 +1748,12 @@ def add_block(self, index, block, params=None, headers=None): ) @query_params() - def data_streams_stats(self, name=None, params=None, headers=None): + def data_streams_stats( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in a data stream. diff --git a/opensearchpy/client/indices.pyi b/opensearchpy/client/indices.pyi deleted file mode 100644 index 87048693..00000000 --- a/opensearchpy/client/indices.pyi +++ /dev/null @@ -1,1097 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IndicesClient(NamespacedClient): - def analyze( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def refresh( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def flush( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - force: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - wait_if_ongoing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def create( - self, - index: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clone( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def open( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def close( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def put_mapping( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - write_index_only: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_mapping( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_field_mapping( - self, - fields: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_alias( - self, - index: Any, - name: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_alias( - self, - name: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- def get_alias( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_aliases( - self, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_alias( - self, - index: Any, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_template( - self, - name: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - order: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def exists_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def get_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_settings( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_settings( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - preserve_existing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def stats( - self, - *, - index: Optional[Any] = ..., - metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - forbid_closed_indices: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def segments( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def validate_query( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - all_shards: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - q: Optional[Any] = ..., - rewrite: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clear_cache( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - query: Optional[Any] = ..., - request: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - only_ancient_segments: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shard_stores( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def forcemerge( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - max_num_segments: Optional[Any] = ..., - only_expunge_deletes: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shrink( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def split( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def rollover( - self, - alias: Any, - *, - body: Optional[Any] = ..., - new_index: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_data_stream( - self, - name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_index_template( - self, - name: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def get_index_template( - self, - *, - name: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_index_template( - self, - name: Any, - *, - body: Any, - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def simulate_index_template( - self, - name: Any, - *, - body: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_data_stream( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def simulate_template( - self, - *, - body: Optional[Any] = ..., - name: Optional[Any] = ..., - cause: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def resolve_index( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def add_block( - self, - index: Any, - block: Any, - *, - allow_no_indices: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def data_streams_stats( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index 6282c7b8..4bf558b9 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -36,12 +36,19 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout") - def get_pipeline(self, id=None, params=None, headers=None): + def get_pipeline( + self, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a pipeline. @@ -59,7 +66,13 @@ def get_pipeline(self, id=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def put_pipeline(self, id, body, params=None, headers=None): + def put_pipeline( + self, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a pipeline. @@ -86,7 +99,12 @@ def put_pipeline(self, id, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_pipeline(self, id, params=None, headers=None): + def delete_pipeline( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a pipeline. @@ -110,7 +128,13 @@ def delete_pipeline(self, id, params=None, headers=None): ) @query_params("verbose") - def simulate(self, body, id=None, params=None, headers=None): + def simulate( + self, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to simulate a pipeline with example documents. @@ -132,7 +156,11 @@ def simulate(self, body, id=None, params=None, headers=None): ) @query_params() - def processor_grok(self, params=None, headers=None): + def processor_grok( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of the built-in patterns. 
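The annotated `IngestClient` methods above keep the same runtime behavior; only the signatures gain types. A minimal sketch of how these methods are called, in the style of the guides earlier in this series (the host, credentials, pipeline id, processor, and sample document are illustrative assumptions, not part of the patch):

```python
from opensearchpy import OpenSearch

# Connection details are assumptions for this sketch.
client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

# put_pipeline(id, body): create or replace an ingest pipeline.
client.ingest.put_pipeline(
    id="my-pipeline",
    body={
        "description": "Lowercase the title field",
        "processors": [{"lowercase": {"field": "title"}}],
    },
)

# simulate(body, id=None): run sample documents through the pipeline
# without indexing them.
response = client.ingest.simulate(
    id="my-pipeline",
    body={"docs": [{"_source": {"title": "MOVIES"}}]},
)
print(response)

# get_pipeline(id=None) and delete_pipeline(id) round out the lifecycle.
print(client.ingest.get_pipeline(id="my-pipeline"))
client.ingest.delete_pipeline(id="my-pipeline")
```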
diff --git a/opensearchpy/client/ingest.pyi b/opensearchpy/client/ingest.pyi deleted file mode 100644 index c7531f0e..00000000 --- a/opensearchpy/client/ingest.pyi +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IngestClient(NamespacedClient): - def get_pipeline( - self, - *, - id: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_pipeline( - self, - id: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_pipeline( - self, - id: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def simulate( - self, - *, - body: Any, - id: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def processor_grok( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 28ea1357..6a7b5db1 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -36,14 +36,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params("timeout") def reload_secure_settings( - self, body=None, node_id=None, params=None, headers=None - ): + self, + body: Any = None, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reloads secure settings. @@ -64,7 +70,13 @@ def reload_secure_settings( ) @query_params("flat_settings", "timeout") - def info(self, node_id=None, metric=None, params=None, headers=None): + def info( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about nodes in the cluster. @@ -95,8 +107,13 @@ def info(self, node_id=None, metric=None, params=None, headers=None): "types", ) def stats( - self, node_id=None, metric=None, index_metric=None, params=None, headers=None - ): + self, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns statistical information about nodes in the cluster. 
@@ -140,7 +157,12 @@ def stats( @query_params( "doc_type", "ignore_idle_threads", "interval", "snapshots", "threads", "timeout" ) - def hot_threads(self, node_id=None, params=None, headers=None): + def hot_threads( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about hot threads on each node in the cluster. @@ -173,7 +195,13 @@ def hot_threads(self, node_id=None, params=None, headers=None): ) @query_params("timeout") - def usage(self, node_id=None, metric=None, params=None, headers=None): + def usage( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns low-level information about REST actions usage on nodes. diff --git a/opensearchpy/client/nodes.pyi b/opensearchpy/client/nodes.pyi deleted file mode 100644 index 78465481..00000000 --- a/opensearchpy/client/nodes.pyi +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class NodesClient(NamespacedClient): - def reload_secure_settings( - self, - *, - body: Optional[Any] = ..., - node_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def info( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def stats( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - index_metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - level: Optional[Any] = ..., - timeout: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def hot_threads( - self, - *, - node_id: Optional[Any] = ..., - doc_type: Optional[Any] = ..., - ignore_idle_threads: Optional[Any] = ..., - interval: Optional[Any] = ..., - snapshots: Optional[Any] = ..., - threads: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def usage( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/plugins.py b/opensearchpy/client/plugins.py index 19570be4..b12214d7 100644 --- a/opensearchpy/client/plugins.py +++ b/opensearchpy/client/plugins.py @@ -9,14 +9,19 @@ # GitHub history for details. 
 import warnings
+from typing import Any

 from ..plugins.alerting import AlertingClient
 from ..plugins.index_management import IndexManagementClient
+from .client import Client
 from .utils import NamespacedClient


 class PluginsClient(NamespacedClient):
-    def __init__(self, client):
+    alerting: Any
+    index_management: Any
+
+    def __init__(self, client: Client) -> None:
         super(PluginsClient, self).__init__(client)
         # self.query_workbench = QueryWorkbenchClient(client)
         # self.reporting = ReportingClient(client)
@@ -28,7 +33,7 @@ def __init__(self, client):

         self._dynamic_lookup(client)

-    def _dynamic_lookup(self, client):
+    def _dynamic_lookup(self, client: Any) -> None:
         # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742

         plugins = [
diff --git a/opensearchpy/client/plugins.pyi b/opensearchpy/client/plugins.pyi
deleted file mode 100644
index da9a7488..00000000
--- a/opensearchpy/client/plugins.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-from typing import Any
-
-from ..client import OpenSearch
-from ..plugins.alerting import AlertingClient as AlertingClient
-from .utils import NamespacedClient as NamespacedClient
-
-class PluginsClient(NamespacedClient):
-    alerting: Any
-    index_management: Any
-    def __init__(self, client: OpenSearch) -> None: ...
diff --git a/opensearchpy/client/remote.py b/opensearchpy/client/remote.py
index eba66927..5c1c0f0c 100644
--- a/opensearchpy/client/remote.py
+++ b/opensearchpy/client/remote.py
@@ -26,12 +26,14 @@
 # under the License.


+from typing import Any
+
 from .utils import NamespacedClient, query_params


 class RemoteClient(NamespacedClient):
     @query_params()
-    def info(self, params=None, headers=None):
+    def info(self, params: Any = None, headers: Any = None) -> Any:
         return self.transport.perform_request(
             "GET", "/_remote/info", params=params, headers=headers
         )
diff --git a/opensearchpy/client/remote.pyi b/opensearchpy/client/remote.pyi
deleted file mode 100644
index 93e8c067..00000000
--- a/opensearchpy/client/remote.pyi
+++ /dev/null
@@ -1,46 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-#
-# Licensed to Elasticsearch B.V. under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch B.V. licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from typing import Any, Collection, MutableMapping, Optional, Tuple, Union
-
-from .utils import NamespacedClient
-
-class RemoteClient(NamespacedClient):
-    def info(
-        self,
-        *,
-        timeout: Optional[Any] = None,
-        pretty: Optional[bool] = None,
-        human: Optional[bool] = None,
-        error_trace: Optional[bool] = None,
-        format: Optional[str] = None,
-        filter_path: Optional[Union[str, Collection[str]]] = None,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = None,
-        headers: Optional[MutableMapping[str, str]] = None,
-    ) -> Any: ...
diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py
index 8f4313b7..a019a99c 100644
--- a/opensearchpy/client/remote_store.py
+++ b/opensearchpy/client/remote_store.py
@@ -7,6 +7,7 @@
 #
 # Modifications Copyright OpenSearch Contributors. See
 # GitHub history for details.
+
 # ----------------------------------------------------
 # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST.
 #
@@ -17,12 +18,19 @@
 # -----------------------------------------------------


+from typing import Any
+
 from .utils import SKIP_IN_PATH, NamespacedClient, query_params


 class RemoteStoreClient(NamespacedClient):
     @query_params("cluster_manager_timeout", "wait_for_completion")
-    def restore(self, body, params=None, headers=None):
+    def restore(
+        self,
+        body: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Restores from remote store.

diff --git a/opensearchpy/client/remote_store.pyi b/opensearchpy/client/remote_store.pyi
deleted file mode 100644
index 50358e63..00000000
--- a/opensearchpy/client/remote_store.pyi
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-# ----------------------------------------------------
-# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST.
-#
-# To contribute, kindly make essential modifications through either the "opensearch-py client generator":
-# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py
-# or the "OpenSearch API specification" available at:
-# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json
-# -----------------------------------------------------
-
-from typing import Any, Collection, MutableMapping, Optional, Tuple, Union
-
-from .utils import NamespacedClient
-
-class RemoteStoreClient(NamespacedClient):
-    def restore(
-        self,
-        *,
-        body: Any,
-        cluster_manager_timeout: Optional[Any] = ...,
-        wait_for_completion: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
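The `nodes`, `remote`, and `remote_store` namespaces typed above follow the same pattern. A short sketch reusing the `client` from the ingest example (the `restore` call is commented out because it needs a cluster with remote-backed storage, and the index name is an assumption):

```python
# Node discovery and statistics; `metric` narrows the response payload.
print(client.nodes.info())
print(client.nodes.stats(metric="os"))
print(client.nodes.hot_threads())

# Cross-cluster connection information.
print(client.remote.info())

# Restore indices from the remote store. Requires remote-store-enabled
# indices, hence commented out; "wait_for_completion" is one of the query
# params declared on the method above.
# client.remote_store.restore(
#     body={"indices": ["movies"]},
#     wait_for_completion=True,
# )
```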
diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 14bc0229..6d1574ea 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -8,7 +8,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. - # ---------------------------------------------------- # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # @@ -19,14 +18,20 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): - from ._patch import health_check, update_audit_config + from ._patch import health_check, update_audit_config # type: ignore @query_params() - def get_account_details(self, params=None, headers=None): + def get_account_details( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns account details for the current user. @@ -36,7 +41,12 @@ def get_account_details(self, params=None, headers=None): ) @query_params() - def change_password(self, body, params=None, headers=None): + def change_password( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the password for the current user. @@ -54,7 +64,12 @@ def change_password(self, body, params=None, headers=None): ) @query_params() - def get_action_group(self, action_group, params=None, headers=None): + def get_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one action group. @@ -74,7 +89,11 @@ def get_action_group(self, action_group, params=None, headers=None): ) @query_params() - def get_action_groups(self, params=None, headers=None): + def get_action_groups( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all action groups. @@ -87,7 +106,12 @@ def get_action_groups(self, params=None, headers=None): ) @query_params() - def delete_action_group(self, action_group, params=None, headers=None): + def delete_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete a specified action group. @@ -107,7 +131,13 @@ def delete_action_group(self, action_group, params=None, headers=None): ) @query_params() - def create_action_group(self, action_group, body, params=None, headers=None): + def create_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified action group. @@ -128,7 +158,13 @@ def create_action_group(self, action_group, body, params=None, headers=None): ) @query_params() - def patch_action_group(self, action_group, body, params=None, headers=None): + def patch_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an action group. @@ -147,7 +183,12 @@ def patch_action_group(self, action_group, body, params=None, headers=None): ) @query_params() - def patch_action_groups(self, body, params=None, headers=None): + def patch_action_groups( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. 
@@ -165,7 +206,12 @@ def patch_action_groups(self, body, params=None, headers=None): ) @query_params() - def get_user(self, username, params=None, headers=None): + def get_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieve one internal user. @@ -182,7 +228,11 @@ def get_user(self, username, params=None, headers=None): ) @query_params() - def get_users(self, params=None, headers=None): + def get_users( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieve all internal users. @@ -195,7 +245,12 @@ def get_users(self, params=None, headers=None): ) @query_params() - def delete_user(self, username, params=None, headers=None): + def delete_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete the specified user. @@ -212,7 +267,13 @@ def delete_user(self, username, params=None, headers=None): ) @query_params() - def create_user(self, username, body, params=None, headers=None): + def create_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified user. @@ -231,7 +292,13 @@ def create_user(self, username, body, params=None, headers=None): ) @query_params() - def patch_user(self, username, body, params=None, headers=None): + def patch_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an internal user. @@ -250,7 +317,12 @@ def patch_user(self, username, body, params=None, headers=None): ) @query_params() - def patch_users(self, body, params=None, headers=None): + def patch_users( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. @@ -268,7 +340,12 @@ def patch_users(self, body, params=None, headers=None): ) @query_params() - def get_role(self, role, params=None, headers=None): + def get_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role. @@ -285,7 +362,11 @@ def get_role(self, role, params=None, headers=None): ) @query_params() - def get_roles(self, params=None, headers=None): + def get_roles( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all roles. @@ -295,7 +376,12 @@ def get_roles(self, params=None, headers=None): ) @query_params() - def delete_role(self, role, params=None, headers=None): + def delete_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete the specified role. @@ -312,7 +398,13 @@ def delete_role(self, role, params=None, headers=None): ) @query_params() - def create_role(self, role, body, params=None, headers=None): + def create_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role. @@ -331,7 +423,13 @@ def create_role(self, role, body, params=None, headers=None): ) @query_params() - def patch_role(self, role, body, params=None, headers=None): + def patch_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role. 
@@ -350,7 +448,12 @@ def patch_role(self, role, body, params=None, headers=None): ) @query_params() - def patch_roles(self, body, params=None, headers=None): + def patch_roles( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. @@ -368,7 +471,12 @@ def patch_roles(self, body, params=None, headers=None): ) @query_params() - def get_role_mapping(self, role, params=None, headers=None): + def get_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role mapping. @@ -385,7 +493,11 @@ def get_role_mapping(self, role, params=None, headers=None): ) @query_params() - def get_role_mappings(self, params=None, headers=None): + def get_role_mappings( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all role mappings. @@ -398,7 +510,12 @@ def get_role_mappings(self, params=None, headers=None): ) @query_params() - def delete_role_mapping(self, role, params=None, headers=None): + def delete_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes the specified role mapping. @@ -415,7 +532,13 @@ def delete_role_mapping(self, role, params=None, headers=None): ) @query_params() - def create_role_mapping(self, role, body, params=None, headers=None): + def create_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role mapping. @@ -434,7 +557,13 @@ def create_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - def patch_role_mapping(self, role, body, params=None, headers=None): + def patch_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role mapping. @@ -453,7 +582,12 @@ def patch_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - def patch_role_mappings(self, body, params=None, headers=None): + def patch_role_mappings( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates multiple role mappings in a single call. @@ -471,7 +605,12 @@ def patch_role_mappings(self, body, params=None, headers=None): ) @query_params() - def get_tenant(self, tenant, params=None, headers=None): + def get_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one tenant. @@ -488,7 +627,11 @@ def get_tenant(self, tenant, params=None, headers=None): ) @query_params() - def get_tenants(self, params=None, headers=None): + def get_tenants( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all tenants. @@ -498,7 +641,12 @@ def get_tenants(self, params=None, headers=None): ) @query_params() - def delete_tenant(self, tenant, params=None, headers=None): + def delete_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Delete the specified tenant. @@ -515,7 +663,13 @@ def delete_tenant(self, tenant, params=None, headers=None): ) @query_params() - def create_tenant(self, tenant, body, params=None, headers=None): + def create_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified tenant. 
@@ -534,7 +688,13 @@ def create_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - def patch_tenant(self, tenant, body, params=None, headers=None): + def patch_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify a single tenant. @@ -553,7 +713,12 @@ def patch_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - def patch_tenants(self, body, params=None, headers=None): + def patch_tenants( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify multiple tenants in a single call. @@ -571,7 +736,11 @@ def patch_tenants(self, body, params=None, headers=None): ) @query_params() - def get_configuration(self, params=None, headers=None): + def get_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the current Security plugin configuration in JSON format. @@ -584,7 +753,12 @@ def get_configuration(self, params=None, headers=None): ) @query_params() - def update_configuration(self, body, params=None, headers=None): + def update_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds or updates the existing configuration using the REST API. @@ -602,7 +776,12 @@ def update_configuration(self, body, params=None, headers=None): ) @query_params() - def patch_configuration(self, body, params=None, headers=None): + def patch_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ A PATCH call is used to update the existing configuration using the REST API. @@ -620,7 +799,12 @@ def patch_configuration(self, body, params=None, headers=None): ) @query_params() - def get_distinguished_names(self, cluster_name=None, params=None, headers=None): + def get_distinguished_names( + self, + cluster_name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all distinguished names in the allow list. @@ -635,8 +819,12 @@ def get_distinguished_names(self, cluster_name=None, params=None, headers=None): @query_params() def update_distinguished_names( - self, cluster_name, body=None, params=None, headers=None - ): + self, + cluster_name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds or updates the specified distinguished names in the cluster’s or node’s allow list. @@ -657,7 +845,12 @@ def update_distinguished_names( ) @query_params() - def delete_distinguished_names(self, cluster_name, params=None, headers=None): + def delete_distinguished_names( + self, + cluster_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes all distinguished names in the specified cluster’s or node’s allow list. @@ -677,7 +870,11 @@ def delete_distinguished_names(self, cluster_name, params=None, headers=None): ) @query_params() - def get_certificates(self, params=None, headers=None): + def get_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves the cluster’s security certificates. @@ -687,7 +884,11 @@ def get_certificates(self, params=None, headers=None): ) @query_params() - def reload_transport_certificates(self, params=None, headers=None): + def reload_transport_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reload transport layer communication certificates. 
@@ -700,7 +901,11 @@ def reload_transport_certificates(self, params=None, headers=None): ) @query_params() - def reload_http_certificates(self, params=None, headers=None): + def reload_http_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reload HTTP layer communication certificates. @@ -713,7 +918,11 @@ def reload_http_certificates(self, params=None, headers=None): ) @query_params() - def flush_cache(self, params=None, headers=None): + def flush_cache( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. @@ -723,7 +932,11 @@ def flush_cache(self, params=None, headers=None): ) @query_params() - def health(self, params=None, headers=None): + def health( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Checks to see if the Security plugin is up and running. @@ -733,7 +946,11 @@ def health(self, params=None, headers=None): ) @query_params() - def get_audit_configuration(self, params=None, headers=None): + def get_audit_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves the audit configuration. @@ -743,7 +960,12 @@ def get_audit_configuration(self, params=None, headers=None): ) @query_params() - def update_audit_configuration(self, body, params=None, headers=None): + def update_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the audit configuration. @@ -761,7 +983,12 @@ def update_audit_configuration(self, body, params=None, headers=None): ) @query_params() - def patch_audit_configuration(self, body, params=None, headers=None): + def patch_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. @@ -779,7 +1006,12 @@ def patch_audit_configuration(self, body, params=None, headers=None): ) @query_params() - def patch_distinguished_names(self, body, params=None, headers=None): + def patch_distinguished_names( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Bulk update of distinguished names. diff --git a/opensearchpy/client/security.pyi b/opensearchpy/client/security.pyi deleted file mode 100644 index 99e009d9..00000000 --- a/opensearchpy/client/security.pyi +++ /dev/null @@ -1,821 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SecurityClient(NamespacedClient): - def get_account_details( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def change_password( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_action_groups( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_action_group( - self, - action_group: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_action_group( - self, - action_group: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_action_groups( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_users( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_user( - self, - username: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_user( - self, - username: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_users( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_roles( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_role( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_role( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def patch_roles( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_role_mappings( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_role_mapping( - self, - role: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def patch_role_mapping( - self, - role: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_role_mappings( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_tenants( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_tenant( - self, - tenant: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def create_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_tenant( - self, - tenant: Any, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_tenants( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def patch_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_distinguished_names( - self, - *, - cluster_name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_distinguished_names( - self, - cluster_name: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_distinguished_names( - self, - cluster_name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def reload_transport_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reload_http_certificates( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def flush_cache( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def health( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_audit_configuration( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def update_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_audit_configuration( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def patch_distinguished_names( - self, - *, - body: Any, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index 313f7dd3..fe6536fa 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -36,12 +36,21 @@ # ----------------------------------------------------- +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - def create(self, repository, snapshot, body=None, params=None, headers=None): + def create( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a snapshot in a repository. @@ -70,7 +79,13 @@ def create(self, repository, snapshot, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout") - def delete(self, repository, snapshot, params=None, headers=None): + def delete( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a snapshot. @@ -97,7 +112,13 @@ def delete(self, repository, snapshot, params=None, headers=None): @query_params( "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) - def get(self, repository, snapshot, params=None, headers=None): + def get( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a snapshot. 
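With `create`, `delete`, and `get` annotated as above, a snapshot round trip type-checks as plain `Any`-to-`Any` calls. A minimal sketch, assuming a repository named `my_repository` is already registered and the client points at a local test cluster:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],  # assumed local test cluster
    use_ssl=True,
    verify_certs=False,
)

# `body` defaults to None, so a bare create is legal; `wait_for_completion`
# is one of the declared @query_params and travels through **kwargs.
client.snapshot.create(
    repository="my_repository",  # assumed to be registered already
    snapshot="nightly-1",        # illustrative snapshot name
    body={"indices": "movies"},
    wait_for_completion=True,
)

# get() takes the same repository/snapshot pair and returns the metadata.
info = client.snapshot.get(repository="my_repository", snapshot="nightly-1")
print(info)
```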
@@ -127,7 +148,12 @@ def get(self, repository, snapshot, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def delete_repository(self, repository, params=None, headers=None): + def delete_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a repository. @@ -152,7 +178,12 @@ def delete_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "local", "master_timeout") - def get_repository(self, repository=None, params=None, headers=None): + def get_repository( + self, + repository: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a repository. @@ -171,7 +202,13 @@ def get_repository(self, repository=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") - def create_repository(self, repository, body, params=None, headers=None): + def create_repository( + self, + repository: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a repository. @@ -199,7 +236,14 @@ def create_repository(self, repository, body, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") - def restore(self, repository, snapshot, body=None, params=None, headers=None): + def restore( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Restores a snapshot. @@ -228,7 +272,13 @@ def restore(self, repository, snapshot, body=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") - def status(self, repository=None, snapshot=None, params=None, headers=None): + def status( + self, + repository: Any = None, + snapshot: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the status of a snapshot. @@ -252,7 +302,12 @@ def status(self, repository=None, snapshot=None, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def verify_repository(self, repository, params=None, headers=None): + def verify_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Verifies a repository. @@ -276,7 +331,12 @@ def verify_repository(self, repository, params=None, headers=None): ) @query_params("cluster_manager_timeout", "master_timeout", "timeout") - def cleanup_repository(self, repository, params=None, headers=None): + def cleanup_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes stale data from repository. @@ -301,8 +361,14 @@ def cleanup_repository(self, repository, params=None, headers=None): @query_params("cluster_manager_timeout", "master_timeout") def clone( - self, repository, snapshot, target_snapshot, body, params=None, headers=None - ): + self, + repository: Any, + snapshot: Any, + target_snapshot: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. 
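Note that the reflowed `clone` signature keeps `body` required, unlike `create` and `restore`, where it stays optional. A hedged sketch of cloning a snapshot and then pruning stale repository data, continuing the assumptions above (the `"*"` index pattern is illustrative):

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],  # assumed local test cluster
    use_ssl=True,
    verify_certs=False,
)

# clone() requires a body; "*" here is an illustrative pattern meant to copy
# every index captured in the source snapshot.
client.snapshot.clone(
    repository="my_repository",
    snapshot="nightly-1",
    target_snapshot="nightly-1-copy",
    body={"indices": "*"},
)

# cleanup_repository() takes only the repository name plus params/headers.
client.snapshot.cleanup_repository(repository="my_repository")
```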
diff --git a/opensearchpy/client/snapshot.pyi b/opensearchpy/client/snapshot.pyi deleted file mode 100644 index fd239fad..00000000 --- a/opensearchpy/client/snapshot.pyi +++ /dev/null @@ -1,272 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. -# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SnapshotClient(NamespacedClient): - def create( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get( - self, - repository: Any, - snapshot: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_repository( - self, - *, - repository: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_repository( - self, - repository: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - verify: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def restore( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def status( - self, - *, - repository: Optional[Any] = ..., - snapshot: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def verify_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cleanup_repository( - self, - repository: Any, - *, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def clone( - self, - repository: Any, - snapshot: Any, - target_snapshot: Any, - *, - body: Any, - cluster_manager_timeout: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index 90c4e731..7e675233 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -37,6 +37,7 @@ import warnings +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -51,7 +52,11 @@ class TasksClient(NamespacedClient): "timeout", "wait_for_completion", ) - def list(self, params=None, headers=None): + def list( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of tasks. @@ -77,7 +82,12 @@ def list(self, params=None, headers=None): ) @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") - def cancel(self, task_id=None, params=None, headers=None): + def cancel( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Cancels a task, if it can be cancelled through an API. @@ -103,7 +113,12 @@ def cancel(self, task_id=None, params=None, headers=None): ) @query_params("timeout", "wait_for_completion") - def get(self, task_id=None, params=None, headers=None): + def get( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a task. diff --git a/opensearchpy/client/tasks.pyi b/opensearchpy/client/tasks.pyi deleted file mode 100644 index 0aeed153..00000000 --- a/opensearchpy/client/tasks.pyi +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
-# -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class TasksClient(NamespacedClient): - def list( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - group_by: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cancel( - self, - *, - task_id: Optional[Any] = ..., - actions: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - *, - task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
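The `TasksClient` methods typed earlier follow the same pattern: the task identifier defaults to `None`, and query parameters such as `detailed` and `actions` (declared in the stub removed above) travel through `**kwargs`. A minimal sketch; the task id shown is hypothetical:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],  # assumed local test cluster
    use_ssl=True,
    verify_certs=False,
)

# list() works without a task id and filters via the declared query params.
tasks = client.tasks.list(detailed=True, actions="*search*")
print(tasks)

# get() accepts an explicit id when polling a single task.
# task = client.tasks.get(task_id="oTUltX4IQMOUUVeiohTt8A:12345")  # hypothetical id
```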
diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index 7b7366de..3ae204e6 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -32,14 +32,17 @@ import weakref from datetime import date, datetime from functools import wraps +from typing import Any, Callable + +from opensearchpy.serializer import Serializer from ..compat import quote, string_types, to_bytes, to_str, unquote, urlparse # parts of URL to be omitted -SKIP_IN_PATH = (None, "", b"", [], ()) +SKIP_IN_PATH: Any = (None, "", b"", [], ()) -def _normalize_hosts(hosts): +def _normalize_hosts(hosts: Any) -> Any: """ Helper function to transform hosts argument to :class:`~opensearchpy.OpenSearch` to a list of dicts. @@ -57,7 +60,7 @@ def _normalize_hosts(hosts): for host in hosts: if isinstance(host, string_types): if "://" not in host: - host = "//%s" % host + host = "//%s" % host # type: ignore parsed_url = urlparse(host) h = {"host": parsed_url.hostname} @@ -84,7 +87,7 @@ def _normalize_hosts(hosts): return out -def _escape(value): +def _escape(value: Any) -> Any: """ Escape a single value of a URL string or a query parameter. If it is a list or tuple, turn it into a comma-separated string first. @@ -114,7 +117,7 @@ def _escape(value): return str(value) -def _make_path(*parts): +def _make_path(*parts: Any) -> str: """ Create a URL string from parts, omit all `None` values and empty strings. Convert lists and tuples to comma separated values. @@ -132,15 +135,15 @@ def _make_path(*parts): GLOBAL_PARAMS = ("pretty", "human", "error_trace", "format", "filter_path") -def query_params(*opensearch_query_params): +def query_params(*opensearch_query_params: Any) -> Callable: # type: ignore """ Decorator that pops all accepted parameters from method's kwargs and puts them in the params argument. """ - def _wrapper(func): + def _wrapper(func: Any) -> Any: @wraps(func) - def _wrapped(*args, **kwargs): + def _wrapped(*args: Any, **kwargs: Any) -> Any: params = (kwargs.pop("params", None) or {}).copy() headers = { k.lower(): v @@ -182,7 +185,7 @@ def _wrapped(*args, **kwargs): return _wrapper -def _bulk_body(serializer, body): +def _bulk_body(serializer: Serializer, body: str) -> str: # if not passed in a string, serialize items and join by newline if not isinstance(body, string_types): body = "\n".join(map(serializer.dumps, body)) @@ -197,7 +200,7 @@ def _bulk_body(serializer, body): return body -def _base64_auth_header(auth_value): +def _base64_auth_header(auth_value: Any) -> str: """Takes either a 2-tuple or a base64-encoded string and returns a base64-encoded string to be used as an HTTP authorization header. 
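The helper annotations above change no runtime behavior: per the docstrings, `_make_path` still omits `None` and empty parts (`SKIP_IN_PATH`) and `_escape` still joins lists and tuples into comma-separated strings. A small sketch poking at these private helpers purely for illustration; the expected outputs in the comments are a reading of the code, not a documented API contract:

```python
from opensearchpy.client.utils import _escape, _make_path

# _make_path skips None/empty parts (SKIP_IN_PATH) and percent-encodes the
# rest; lists and tuples collapse into comma-separated values.
print(_make_path("movies", "_doc", None, 42))  # expected: /movies/_doc/42
print(_make_path("logs-*", ["a", "b"]))        # expected: /logs-*/a,b

# _escape normalizes single values for query strings before quoting.
print(_escape(["x", "y"]))                     # expected: x,y
```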
@@ -208,17 +211,17 @@ def _base64_auth_header(auth_value): class NamespacedClient(object): - def __init__(self, client): + def __init__(self, client: Any) -> None: self.client = client @property - def transport(self): + def transport(self) -> Any: return self.client.transport class AddonClient(NamespacedClient): @classmethod - def infect_client(cls, client): + def infect_client(cls: Any, client: NamespacedClient) -> NamespacedClient: addon = cls(weakref.proxy(client)) setattr(client, cls.namespace, addon) return client diff --git a/opensearchpy/client/utils.pyi b/opensearchpy/client/utils.pyi deleted file mode 100644 index 2aa263fc..00000000 --- a/opensearchpy/client/utils.pyi +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import unicode_literals - -from typing import ( - Any, - Callable, - Collection, - Dict, - List, - Optional, - Tuple, - TypeVar, - Union, -) - -from ..client import OpenSearch -from ..serializer import Serializer -from ..transport import Transport - -T = TypeVar("T") -SKIP_IN_PATH: Collection[Any] - -def _normalize_hosts( - hosts: Optional[Union[str, Collection[Union[str, Dict[str, Any]]]]] -) -> List[Dict[str, Any]]: ... -def _escape(value: Any) -> str: ... -def _make_path(*parts: Any) -> str: ... - -GLOBAL_PARAMS: Tuple[str, ...] - -def query_params( - *es_query_params: str, -) -> Callable[[Callable[..., T]], Callable[..., T]]: ... -def _bulk_body( - serializer: Serializer, body: Union[str, bytes, Collection[Any]] -) -> str: ... - -class NamespacedClient: - client: OpenSearch - def __init__(self, client: OpenSearch) -> None: ... - @property - def transport(self) -> Transport: ... diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index 57a88a74..cb8bc7d7 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -26,33 +26,29 @@ # under the License. 
+from collections.abc import Mapping from queue import Queue +from typing import Tuple, Type, Union from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse string_types = str, bytes map = map -def to_str(x, encoding="ascii"): +def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: if not isinstance(x, str): return x.decode(encoding) return x -def to_bytes(x, encoding="ascii"): +def to_bytes(x: Union[str, bytes], encoding: str = "ascii") -> bytes: if not isinstance(x, bytes): return x.encode(encoding) return x try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - - -try: - reraise_exceptions = (RecursionError,) + reraise_exceptions: Tuple[Type[BaseException], ...] = (RecursionError,) except NameError: reraise_exceptions = () diff --git a/opensearchpy/compat.pyi b/opensearchpy/compat.pyi deleted file mode 100644 index 2606c723..00000000 --- a/opensearchpy/compat.pyi +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import sys -from typing import Callable, Tuple, Type, Union - -string_types: Tuple[type, ...] - -to_str: Callable[[Union[str, bytes]], str] -to_bytes: Callable[[Union[str, bytes]], bytes] -reraise_exceptions: Tuple[Type[Exception], ...] - -if sys.version_info[0] == 2: - from itertools import imap as map - from urllib import quote as quote - from urllib import quote_plus as quote_plus - from urllib import unquote as unquote - from urllib import urlencode as urlencode - - from Queue import Queue as Queue - from urlparse import urlparse as urlparse -else: - from urllib.parse import quote as quote - from urllib.parse import quote_plus as quote_plus - from urllib.parse import unquote as unquote - from urllib.parse import urlencode as urlencode - from urllib.parse import urlparse as urlparse - - map = map - from queue import Queue as Queue diff --git a/opensearchpy/connection/__init__.pyi b/opensearchpy/connection/__init__.pyi deleted file mode 100644 index f3f31016..00000000 --- a/opensearchpy/connection/__init__.pyi +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .base import Connection as Connection -from .http_async import AsyncHttpConnection as AsyncHttpConnection -from .http_requests import RequestsHttpConnection as RequestsHttpConnection -from .http_urllib3 import Urllib3HttpConnection as Urllib3HttpConnection -from .http_urllib3 import create_ssl_context as create_ssl_context diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index 87dd22d7..87467ae0 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from six import string_types import opensearchpy @@ -21,11 +23,11 @@ class AsyncConnections(object): singleton in this module. """ - def __init__(self): - self._kwargs = {} - self._conns = {} + def __init__(self) -> None: + self._kwargs: Any = {} + self._conns: Any = {} - async def configure(self, **kwargs): + async def configure(self, **kwargs: Any) -> None: """ Configure multiple connections at once, useful for passing in config dictionaries obtained from other sources, like Django's settings or a @@ -48,13 +50,13 @@ async def configure(self, **kwargs): del self._conns[k] self._kwargs = kwargs - async def add_connection(self, alias, conn): + async def add_connection(self, alias: str, conn: Any) -> None: """ Add a connection object, it will be passed through as-is. """ self._conns[alias] = conn - async def remove_connection(self, alias): + async def remove_connection(self, alias: str) -> None: """ Remove connection from the registry. Raises ``KeyError`` if connection wasn't found. @@ -69,7 +71,7 @@ async def remove_connection(self, alias): if errors == 2: raise KeyError("There is no connection with alias %r." % alias) - async def create_connection(self, alias="default", **kwargs): + async def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ Construct an instance of ``opensearchpy.AsyncOpenSearch`` and register it under given alias. @@ -78,7 +80,7 @@ async def create_connection(self, alias="default", **kwargs): conn = self._conns[alias] = opensearchpy.AsyncOpenSearch(**kwargs) return conn - async def get_connection(self, alias="default"): + async def get_connection(self, alias: str = "default") -> Any: """ Retrieve a connection, construct it if necessary (only configuration was passed to us). 
If a non-string alias has been passed through we diff --git a/opensearchpy/connection/async_connections.pyi b/opensearchpy/connection/async_connections.pyi deleted file mode 100644 index eb310cdf..00000000 --- a/opensearchpy/connection/async_connections.pyi +++ /dev/null @@ -1,11 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncConnections: ... diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index ee8d934f..59418bfa 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -25,7 +25,6 @@ # specific language governing permissions and limitations # under the License. - import gzip import io import logging @@ -33,13 +32,14 @@ import re import warnings from platform import python_version +from typing import Any, Collection, Dict, Mapping, Optional, Union try: import simplejson as json except ImportError: - import json + import json # type: ignore -from .. import __versionstr__ +from .._version import __versionstr__ from ..exceptions import HTTP_EXCEPTIONS, OpenSearchWarning, TransportError logger = logging.getLogger("opensearch") @@ -74,16 +74,16 @@ class Connection(object): def __init__( self, - host="localhost", - port=None, - use_ssl=False, - url_prefix="", - timeout=10, - headers=None, - http_compress=None, - opaque_id=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + use_ssl: bool = False, + url_prefix: str = "", + timeout: int = 10, + headers: Optional[Dict[str, str]] = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + **kwargs: Any + ) -> None: if port is None: port = 9200 @@ -130,24 +130,24 @@ def __init__( self.url_prefix = url_prefix self.timeout = timeout - def __repr__(self): + def __repr__(self) -> str: return "<%s: %s>" % (self.__class__.__name__, self.host) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if not isinstance(other, Connection): raise TypeError("Unsupported equality check for %s and %s" % (self, other)) return self.__hash__() == other.__hash__() - def __hash__(self): + def __hash__(self) -> int: return id(self) - def _gzip_compress(self, body): + def _gzip_compress(self, body: Any) -> bytes: buf = io.BytesIO() with gzip.GzipFile(fileobj=buf, mode="wb") as f: f.write(body) return buf.getvalue() - def _raise_warnings(self, warning_headers): + def _raise_warnings(self, warning_headers: Any) -> None: """If 'headers' contains a 'Warning' header raise the warnings to be seen by the user. Takes an iterable of string values from any number of 'Warning' headers. 
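As a usage sketch of the `AsyncConnections` registry typed above: per its docstrings, `configure()` only stores kwargs, and clients are constructed lazily on first retrieval. The host URLs and the second alias below are placeholders.

```python
# Sketch of the AsyncConnections registry shown above. Aliases configured
# up front are only turned into clients when first requested.
import asyncio

from opensearchpy.connection.async_connections import AsyncConnections


async def main() -> None:
    registry = AsyncConnections()
    await registry.configure(
        default={"hosts": ["https://localhost:9200"]},
        analytics={"hosts": ["https://localhost:9201"]},  # hypothetical second cluster
    )
    # Constructed here from the stored kwargs, not inside configure().
    client = await registry.get_connection("default")
    print(client)


asyncio.run(main())
```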
@@ -174,7 +174,7 @@ def _raise_warnings(self, warning_headers): for message in warning_messages: warnings.warn(message, category=OpenSearchWarning) - def _pretty_json(self, data): + def _pretty_json(self, data: Union[str, bytes]) -> str: # pretty JSON in tracer curl logs try: return json.dumps( @@ -182,9 +182,17 @@ def _pretty_json(self, data): ).replace("'", r"\u0027") except (ValueError, TypeError): # non-json data or a bulk request - return data + return data # type: ignore - def _log_trace(self, method, path, body, status_code, response, duration): + def _log_trace( + self, + method: str, + path: str, + body: Optional[Union[str, bytes]], + status_code: Optional[int], + response: Optional[str], + duration: Optional[float], + ) -> None: if not tracer.isEnabledFor(logging.INFO) or not tracer.handlers: return @@ -210,29 +218,33 @@ def _log_trace(self, method, path, body, status_code, response, duration): def perform_request( self, - method, - url, - params=None, - body=None, - timeout=None, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: raise NotImplementedError() def log_request_success( - self, method, full_url, path, body, status_code, response, duration - ): + self, + method: str, + full_url: str, + path: str, + body: Any, + status_code: int, + response: str, + duration: float, + ) -> None: """Log a successful API call.""" # TODO: optionally pass in params instead of full_url and do urlencode only when needed # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth - if body: - try: - body = body.decode("utf-8", "ignore") - except AttributeError: - pass + if body and isinstance(body, bytes): + body = body.decode("utf-8", "ignore") logger.info( "%s %s [status:%s request:%.3fs]", method, full_url, status_code, duration @@ -244,15 +256,15 @@ def log_request_success( def log_request_fail( self, - method, - full_url, - path, - body, - duration, - status_code=None, - response=None, - exception=None, - ): + method: str, + full_url: str, + path: str, + body: Any, + duration: float, + status_code: Optional[int] = None, + response: Optional[str] = None, + exception: Optional[Exception] = None, + ) -> None: """Log an unsuccessful API call.""" # do not log 404s on HEAD requests if method == "HEAD" and status_code == 404: @@ -268,11 +280,8 @@ def log_request_fail( # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth - if body: - try: - body = body.decode("utf-8", "ignore") - except AttributeError: - pass + if body and isinstance(body, bytes): + body = body.decode("utf-8", "ignore") logger.debug("> %s", body) @@ -281,7 +290,12 @@ def log_request_fail( if response is not None: logger.debug("< %s", response) - def _raise_error(self, status_code, raw_data, content_type=None): + def _raise_error( + self, + status_code: int, + raw_data: Union[str, bytes], + content_type: Optional[str] = None, + ) -> None: """Locate appropriate exception and raise it.""" error_message = raw_data additional_info = None @@ -303,11 +317,11 @@ def _raise_error(self, status_code, raw_data, content_type=None): status_code, error_message, additional_info ) - def _get_default_user_agent(self): + def _get_default_user_agent(self) 
-> str: return "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()) @staticmethod - def default_ca_certs(): + def default_ca_certs() -> Union[str, None]: """ Get the default CA certificate bundle, preferring those configured in the standard OpenSSL environment variables before those provided by @@ -315,12 +329,12 @@ def default_ca_certs(): """ ca_certs = os.environ.get("SSL_CERT_FILE") or os.environ.get("SSL_CERT_DIR") - if ca_certs: - return ca_certs + if not ca_certs: + try: + import certifi - try: - import certifi - except ImportError: - pass - else: - return certifi.where() + ca_certs = certifi.where() + except ImportError: + pass + + return ca_certs diff --git a/opensearchpy/connection/base.pyi b/opensearchpy/connection/base.pyi deleted file mode 100644 index 333f4a70..00000000 --- a/opensearchpy/connection/base.pyi +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import ( - Any, - AnyStr, - Collection, - Dict, - List, - Mapping, - NoReturn, - Optional, - Sequence, - Tuple, - Union, -) - -logger: logging.Logger -tracer: logging.Logger - -class Connection(object): - headers: Dict[str, str] - use_ssl: bool - http_compress: bool - scheme: str - hostname: str - port: Optional[int] - host: str - url_prefix: str - timeout: Optional[Union[float, int]] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - use_ssl: bool = ..., - url_prefix: str = ..., - timeout: Optional[Union[float, int]] = ..., - headers: Optional[Mapping[str, str]] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... - def __repr__(self) -> str: ... - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def _gzip_compress(self, body: bytes) -> bytes: ... - def _raise_warnings(self, warning_headers: Sequence[str]) -> None: ... - def _pretty_json(self, data: Any) -> str: ... - def _log_trace( - self, - method: Any, - path: Any, - body: Any, - status_code: Any, - response: Any, - duration: Any, - ) -> None: ... - def perform_request( - self, - method: str, - url: str, - params: Optional[Mapping[str, Any]] = ..., - body: Optional[bytes] = ..., - timeout: Optional[Union[int, float]] = ..., - ignore: Collection[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ) -> Tuple[int, Mapping[str, str], str]: ... 
- def log_request_success( - self, - method: str, - full_url: str, - path: str, - body: Optional[bytes], - status_code: int, - response: str, - duration: float, - ) -> None: ... - def log_request_fail( - self, - method: str, - full_url: str, - path: str, - body: Optional[bytes], - duration: float, - status_code: Optional[int] = ..., - response: Optional[str] = ..., - exception: Optional[Exception] = ..., - ) -> None: ... - def _raise_error( - self, status_code: int, raw_data: str, content_type: Optional[str] - ) -> NoReturn: ... - def _get_default_user_agent(self) -> str: ... - @staticmethod - def default_ca_certs() -> Optional[str]: ... diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py index 4401ade0..5b1e9a9c 100644 --- a/opensearchpy/connection/connections.py +++ b/opensearchpy/connection/connections.py @@ -25,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from six import string_types import opensearchpy @@ -37,11 +39,11 @@ class Connections(object): singleton in this module. """ - def __init__(self): - self._kwargs = {} - self._conns = {} + def __init__(self) -> None: + self._kwargs: Any = {} + self._conns: Any = {} - def configure(self, **kwargs): + def configure(self, **kwargs: Any) -> None: """ Configure multiple connections at once, useful for passing in config dictionaries obtained from other sources, like Django's settings or a @@ -64,13 +66,13 @@ def configure(self, **kwargs): del self._conns[k] self._kwargs = kwargs - def add_connection(self, alias, conn): + def add_connection(self, alias: str, conn: Any) -> None: """ Add a connection object, it will be passed through as-is. """ self._conns[alias] = conn - def remove_connection(self, alias): + def remove_connection(self, alias: str) -> None: """ Remove connection from the registry. Raises ``KeyError`` if connection wasn't found. @@ -85,7 +87,7 @@ def remove_connection(self, alias): if errors == 2: raise KeyError("There is no connection with alias %r." % alias) - def create_connection(self, alias="default", **kwargs): + def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ Construct an instance of ``opensearchpy.OpenSearch`` and register it under given alias. @@ -94,7 +96,7 @@ def create_connection(self, alias="default", **kwargs): conn = self._conns[alias] = opensearchpy.OpenSearch(**kwargs) return conn - def get_connection(self, alias="default"): + def get_connection(self, alias: str = "default") -> Any: """ Retrieve a connection, construct it if necessary (only configuration was passed to us). If a non-string alias has been passed through we diff --git a/opensearchpy/connection/connections.pyi b/opensearchpy/connection/connections.pyi deleted file mode 100644 index d763f57c..00000000 --- a/opensearchpy/connection/connections.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. 
licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# THIS FILE IS AUTOMATICALLY GENERATED, DO NOT EDIT. - -class Connections(object): ... diff --git a/opensearchpy/connection/http_async.py b/opensearchpy/connection/http_async.py index b7288005..d6ee57ee 100644 --- a/opensearchpy/connection/http_async.py +++ b/opensearchpy/connection/http_async.py @@ -14,8 +14,9 @@ import os import ssl import warnings +from typing import Any, Collection, Mapping, Optional, Union -from .._async._extra_imports import aiohttp, aiohttp_exceptions +from .._async._extra_imports import aiohttp, aiohttp_exceptions # type: ignore from .._async.compat import get_running_loop from .._async.http_aiohttp import AIOHttpConnection from ..compat import reraise_exceptions, string_types, urlencode @@ -31,27 +32,29 @@ class AsyncHttpConnection(AIOHttpConnection): + session: Optional[aiohttp.ClientSession] + def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_fingerprint=None, - maxsize=10, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - loop=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_fingerprint: Any = None, + maxsize: Optional[int] = 10, + headers: Optional[Mapping[str, str]] = None, + ssl_context: Any = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + loop: Any = None, + **kwargs: Any + ) -> None: self.headers = {} super().__init__( @@ -68,7 +71,7 @@ def __init__( if isinstance(http_auth, (tuple, list)): http_auth = aiohttp.BasicAuth(login=http_auth[0], password=http_auth[1]) elif isinstance(http_auth, string_types): - login, password = http_auth.split(":", 1) + login, password = http_auth.split(":", 1) # type: ignore http_auth = aiohttp.BasicAuth(login=login, password=password) # if providing an SSL context, raise error if any other SSL related flag is used @@ -146,8 +149,15 @@ def __init__( self._ssl_context = ssl_context async def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: if self.session is None: await self._create_aiohttp_session() assert self.session is not None @@ -262,14 +272,14 @@ async def perform_request( return response.status, response.headers, raw_data - async def close(self): + async def close(self) -> Any: """ Explicitly closes connection 
""" if self.session: await self.session.close() - async def _create_aiohttp_session(self): + async def _create_aiohttp_session(self) -> Any: """Creates an aiohttp.ClientSession(). This is delayed until the first call to perform_request() so that AsyncTransport has a chance to set AIOHttpConnection.loop @@ -289,9 +299,9 @@ async def _create_aiohttp_session(self): ) -class OpenSearchClientResponse(aiohttp.ClientResponse): - async def text(self, encoding=None, errors="strict"): +class OpenSearchClientResponse(aiohttp.ClientResponse): # type: ignore + async def text(self, encoding: Any = None, errors: str = "strict") -> Any: if self._body is None: await self.read() - return self._body.decode("utf-8", "surrogatepass") + return self._body.decode("utf-8", "surrogatepass") # type: ignore diff --git a/opensearchpy/connection/http_async.pyi b/opensearchpy/connection/http_async.pyi deleted file mode 100644 index 9fcfb246..00000000 --- a/opensearchpy/connection/http_async.pyi +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Mapping, Optional - -from .._async._extra_imports import aiohttp # type: ignore -from .._async.http_aiohttp import AIOHttpConnection - -class AsyncHttpConnection(AIOHttpConnection): - session: Optional[aiohttp.ClientSession] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: Optional[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - loop: Optional[Any] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/http_requests.py b/opensearchpy/connection/http_requests.py index f9e9b1a1..a966631d 100644 --- a/opensearchpy/connection/http_requests.py +++ b/opensearchpy/connection/http_requests.py @@ -28,6 +28,7 @@ import time import warnings +from typing import Any, Collection, Mapping, Optional, Union try: import requests @@ -73,21 +74,21 @@ class RequestsHttpConnection(Connection): def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=True, - ssl_show_warn=True, - ca_certs=None, - client_cert=None, - client_key=None, - headers=None, - http_compress=None, - opaque_id=None, - pool_maxsize=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: bool = True, + ssl_show_warn: bool = True, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + headers: Any = None, + http_compress: Any = None, + opaque_id: Any = None, + pool_maxsize: Any = None, + **kwargs: Any + ) -> None: if not REQUESTS_AVAILABLE: raise ImproperlyConfigured( "Please install requests to use RequestsHttpConnection." @@ -116,13 +117,13 @@ def __init__( if not self.http_compress: # Need to set this to 'None' otherwise Requests adds its own. 
- self.session.headers["accept-encoding"] = None + self.session.headers["accept-encoding"] = None # type: ignore if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = tuple(http_auth) elif isinstance(http_auth, string_types): - http_auth = tuple(http_auth.split(":", 1)) + http_auth = tuple(http_auth.split(":", 1)) # type: ignore self.session.auth = http_auth self.base_url = "%s%s" % ( @@ -147,7 +148,7 @@ def __init__( self.session.verify = ca_certs if not ssl_show_warn: - requests.packages.urllib3.disable_warnings() + requests.packages.urllib3.disable_warnings() # type: ignore if self.use_ssl and not verify_certs and ssl_show_warn: warnings.warn( @@ -155,17 +156,17 @@ def __init__( % self.host ) - def perform_request( + def perform_request( # type: ignore self, - method, - url, - params=None, - body=None, - timeout=None, - allow_redirects=True, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + allow_redirects: Optional[bool] = True, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: url = self.base_url + url headers = headers or {} if params: @@ -174,7 +175,7 @@ def perform_request( orig_body = body if self.http_compress and body: body = self._gzip_compress(body) - headers["content-encoding"] = "gzip" + headers["content-encoding"] = "gzip" # type: ignore start = time.time() request = requests.Request(method=method, headers=headers, url=url, data=body) @@ -182,7 +183,7 @@ def perform_request( settings = self.session.merge_environment_settings( prepared_request.url, {}, None, None, None ) - send_kwargs = { + send_kwargs: Any = { "timeout": timeout or self.timeout, "allow_redirects": allow_redirects, } @@ -247,10 +248,10 @@ def perform_request( return response.status_code, response.headers, raw_data @property - def headers(self): + def headers(self) -> Any: # type: ignore return self.session.headers - def close(self): + def close(self) -> None: """ Explicitly closes connections """ diff --git a/opensearchpy/connection/http_requests.pyi b/opensearchpy/connection/http_requests.pyi deleted file mode 100644 index 61b6d496..00000000 --- a/opensearchpy/connection/http_requests.pyi +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Mapping, Optional - -import requests - -from .base import Connection - -class RequestsHttpConnection(Connection): - session: requests.Session - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - headers: Optional[Mapping[str, str]] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py index bde689ae..2a5ccd3b 100644 --- a/opensearchpy/connection/http_urllib3.py +++ b/opensearchpy/connection/http_urllib3.py @@ -28,12 +28,12 @@ import ssl import time import warnings -from typing import Callable +from typing import Any, Callable, Collection, Mapping, Optional, Union -import urllib3 # type: ignore +import urllib3 from urllib3.exceptions import ReadTimeoutError -from urllib3.exceptions import SSLError as UrllibSSLError # type: ignore -from urllib3.util.retry import Retry # type: ignore +from urllib3.exceptions import SSLError as UrllibSSLError +from urllib3.util.retry import Retry from ..compat import reraise_exceptions, urlencode from ..exceptions import ( @@ -51,7 +51,7 @@ SSL_SHOW_WARN_DEFAULT = object() -def create_ssl_context(**kwargs): +def create_ssl_context(**kwargs: Any) -> Any: """ A helper function around creating an SSL context @@ -99,25 +99,25 @@ class Urllib3HttpConnection(Connection): def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_hostname=None, - ssl_assert_fingerprint=None, - pool_maxsize=None, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_hostname: Any = None, + ssl_assert_fingerprint: Any = None, + pool_maxsize: Any = None, + headers: Any = None, + ssl_context: Any = None, + http_compress: Any = None, + opaque_id: Any = None, + **kwargs: Any + ) -> None: # Initialize headers before calling super().__init__(). 
self.headers = urllib3.make_headers(keep_alive=True) @@ -133,7 +133,7 @@ def __init__( self.http_auth = http_auth if self.http_auth is not None: - if isinstance(self.http_auth, Callable): + if isinstance(self.http_auth, Callable): # type: ignore pass elif isinstance(self.http_auth, (tuple, list)): self.headers.update( @@ -142,7 +142,7 @@ def __init__( else: self.headers.update(urllib3.make_headers(basic_auth=http_auth)) - pool_class = urllib3.HTTPConnectionPool + pool_class: Any = urllib3.HTTPConnectionPool kw = {} # if providing an SSL context, raise error if any other SSL related flag is used @@ -220,8 +220,15 @@ def __init__( ) def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: url = self.url_prefix + url if params: url = "%s?%s" % (url, urlencode(params)) @@ -251,7 +258,7 @@ def perform_request( request_headers["content-encoding"] = "gzip" if self.http_auth is not None: - if isinstance(self.http_auth, Callable): + if isinstance(self.http_auth, Callable): # type: ignore request_headers.update(self.http_auth(method, full_url, body)) response = self.pool.urlopen( @@ -292,10 +299,10 @@ def perform_request( return response.status, response.headers, raw_data - def get_response_headers(self, response): + def get_response_headers(self, response: Any) -> Any: return {header.lower(): value for header, value in response.headers.items()} - def close(self): + def close(self) -> None: """ Explicitly closes connection """ diff --git a/opensearchpy/connection/http_urllib3.pyi b/opensearchpy/connection/http_urllib3.pyi deleted file mode 100644 index 7fe27617..00000000 --- a/opensearchpy/connection/http_urllib3.pyi +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import ssl -from typing import Any, Mapping, Optional, Union - -import urllib3 - -from .base import Connection - -def create_ssl_context( - cafile: Any = ..., - capath: Any = ..., - cadata: Any = ..., -) -> ssl.SSLContext: ... 
- -class Urllib3HttpConnection(Connection): - pool: urllib3.HTTPConnectionPool - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - url_prefix: str = ..., - timeout: Optional[Union[float, int]] = ..., - http_auth: Any = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_hostname: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: int = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/pooling.py b/opensearchpy/connection/pooling.py index 48503a58..87bd8c72 100644 --- a/opensearchpy/connection/pooling.py +++ b/opensearchpy/connection/pooling.py @@ -26,6 +26,8 @@ # under the License. +from typing import Any + from .base import Connection try: @@ -35,6 +37,8 @@ class PoolingConnection(Connection): + _free_connections: queue.Queue[Connection] + """ Base connection class for connections that use libraries without thread safety and no capacity for connection pooling. To use this just implement a @@ -42,23 +46,23 @@ class PoolingConnection(Connection): it. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self._free_connections = queue.Queue() super(PoolingConnection, self).__init__(*args, **kwargs) - def _make_connection(self): + def _make_connection(self) -> Connection: raise NotImplementedError - def _get_connection(self): + def _get_connection(self) -> Connection: try: return self._free_connections.get_nowait() except queue.Empty: return self._make_connection() - def _release_connection(self, con): + def _release_connection(self, con: Connection) -> None: self._free_connections.put(con) - def close(self): + def close(self) -> None: """ Explicitly close connection """ diff --git a/opensearchpy/connection/pooling.pyi b/opensearchpy/connection/pooling.pyi deleted file mode 100644 index 53e38f40..00000000 --- a/opensearchpy/connection/pooling.pyi +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .base import Connection - -class PoolingConnection(Connection): - def _make_connection(self) -> Connection: ... - def _get_connection(self) -> Connection: ... 
- def _release_connection(self, con: Connection) -> None: ... - def close(self) -> None: ... diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py index 7ff15512..defef6f5 100644 --- a/opensearchpy/connection_pool.py +++ b/opensearchpy/connection_pool.py @@ -30,15 +30,13 @@ import random import threading import time +from queue import Empty, PriorityQueue +from typing import Any, Dict, Optional, Sequence, Tuple, Type -try: - from Queue import Empty, PriorityQueue -except ImportError: - from queue import PriorityQueue, Empty - +from .connection import Connection from .exceptions import ImproperlyConfigured -logger = logging.getLogger("opensearch") +logger: logging.Logger = logging.getLogger("opensearch") class ConnectionSelector(object): @@ -60,13 +58,13 @@ class ConnectionSelector(object): connections where there would be none in its zones. """ - def __init__(self, opts): + def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: """ :arg opts: dictionary of connection instances and their options """ self.connection_opts = opts - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> None: """ Select a connection from the given list. @@ -80,7 +78,7 @@ class RandomSelector(ConnectionSelector): Select a connection at random """ - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> Any: return random.choice(connections) @@ -89,11 +87,11 @@ class RoundRobinSelector(ConnectionSelector): Selector using round-robin. """ - def __init__(self, opts): + def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: super(RoundRobinSelector, self).__init__(opts) self.data = threading.local() - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> Any: self.data.rr = getattr(self.data, "rr", -1) + 1 self.data.rr %= len(connections) return connections[self.data.rr] @@ -122,15 +120,24 @@ class ConnectionPool(object): succeeds will be marked as live (its fail count will be deleted). """ + connections_opts: Sequence[Tuple[Connection, Any]] + connections: Any + orig_connections: Tuple[Connection, ...] + dead: Any + dead_count: Dict[Connection, int] + dead_timeout: float + timeout_cutoff: int + selector: Any + def __init__( self, - connections, - dead_timeout=60, - timeout_cutoff=5, - selector_class=RoundRobinSelector, - randomize_hosts=True, - **kwargs - ): + connections: Any, + dead_timeout: float = 60, + timeout_cutoff: int = 5, + selector_class: Type[ConnectionSelector] = RoundRobinSelector, + randomize_hosts: bool = True, + **kwargs: Any + ) -> None: """ :arg connections: list of tuples containing the :class:`~opensearchpy.Connection` instance and its options @@ -164,9 +171,9 @@ def __init__( self.dead_timeout = dead_timeout self.timeout_cutoff = timeout_cutoff - self.selector = selector_class(dict(connections)) + self.selector = selector_class(dict(connections)) # type: ignore - def mark_dead(self, connection, now=None): + def mark_dead(self, connection: Connection, now: Optional[float] = None) -> None: """ Mark the connection as dead (failed). Remove it from the live pool and put it on a timeout. @@ -196,7 +203,7 @@ def mark_dead(self, connection, now=None): timeout, ) - def mark_live(self, connection): + def mark_live(self, connection: Connection) -> None: """ Mark connection as healthy after a resurrection. Resets the fail counter for the connection. 
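To make the selector hook above concrete, here is a hedged sketch of a zone-aware selector in the spirit of the `ConnectionSelector` docstring; the `zone` option is invented for illustration and would have to be supplied in each connection's options. A class like this can be handed to `ConnectionPool` via the `selector_class` argument shown in its `__init__` above.

```python
# Hypothetical zone-aware selector built on the ConnectionSelector hook
# typed above; "zone" is an invented per-connection option, not a library one.
from typing import Sequence

from opensearchpy.connection import Connection
from opensearchpy.connection_pool import ConnectionSelector


class PreferZoneSelector(ConnectionSelector):
    zone = "us-east-1a"  # assumed local zone for this process

    def select(self, connections: Sequence[Connection]) -> Connection:
        # self.connection_opts maps each connection to the options it was
        # registered with (the dict passed in by ConnectionPool.__init__).
        local = [
            c
            for c in connections
            if self.connection_opts.get(c, {}).get("zone") == self.zone
        ]
        return (local or list(connections))[0]
```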
@@ -209,7 +216,7 @@ def mark_live(self, connection): # race condition, safe to ignore pass - def resurrect(self, force=False): + def resurrect(self, force: bool = False) -> Any: """ Attempt to resurrect a connection from the dead pool. It will try to locate one (not all) eligible (its timeout is over) connection to @@ -251,7 +258,7 @@ def resurrect(self, force=False): logger.info("Resurrecting connection %r (force=%s).", connection, force) return connection - def get_connection(self): + def get_connection(self) -> Any: """ Return a connection from the pool using the `ConnectionSelector` instance. @@ -276,38 +283,38 @@ def get_connection(self): # only one connection, no need for a selector return connections[0] - def close(self): + def close(self) -> Any: """ Explicitly closes connections """ for conn in self.connections: conn.close() - def __repr__(self): + def __repr__(self) -> str: return "<%s: %r>" % (type(self).__name__, self.connections) class DummyConnectionPool(ConnectionPool): - def __init__(self, connections, **kwargs): + def __init__(self, connections: Any, **kwargs: Any) -> None: if len(connections) != 1: raise ImproperlyConfigured( "DummyConnectionPool needs exactly one " "connection defined." ) # we need connection opts for sniffing logic self.connection_opts = connections - self.connection = connections[0][0] + self.connection: Any = connections[0][0] self.connections = (self.connection,) - def get_connection(self): + def get_connection(self) -> Any: return self.connection - def close(self): + def close(self) -> None: """ Explicitly closes connections """ self.connection.close() - def _noop(self, *args, **kwargs): + def _noop(self, *args: Any, **kwargs: Any) -> Any: pass mark_dead = mark_live = resurrect = _noop @@ -316,14 +323,14 @@ def _noop(self, *args, **kwargs): class EmptyConnectionPool(ConnectionPool): """A connection pool that is empty. Errors out if used.""" - def __init__(self, *_, **__): + def __init__(self, *_: Any, **__: Any) -> None: self.connections = [] self.connection_opts = [] - def get_connection(self): + def get_connection(self) -> Connection: raise ImproperlyConfigured("No connections were configured") - def _noop(self, *args, **kwargs): + def _noop(self, *args: Any, **kwargs: Any) -> Any: pass close = mark_dead = mark_live = resurrect = _noop diff --git a/opensearchpy/connection_pool.pyi b/opensearchpy/connection_pool.pyi deleted file mode 100644 index e219591c..00000000 --- a/opensearchpy/connection_pool.pyi +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union - -from .connection import Connection - -try: - from Queue import PriorityQueue -except ImportError: - from queue import PriorityQueue - -logger: logging.Logger - -class ConnectionSelector(object): - connection_opts: Sequence[Tuple[Connection, Any]] - def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: ... - def select(self, connections: Sequence[Connection]) -> Connection: ... - -class RandomSelector(ConnectionSelector): ... -class RoundRobinSelector(ConnectionSelector): ... - -class ConnectionPool(object): - connections_opts: Sequence[Tuple[Connection, Any]] - connections: Sequence[Connection] - orig_connections: Tuple[Connection, ...] - dead: PriorityQueue - dead_count: Dict[Connection, int] - dead_timeout: float - timeout_cutoff: int - selector: ConnectionSelector - def __init__( - self, - connections: Sequence[Tuple[Connection, Any]], - dead_timeout: float = ..., - timeout_cutoff: int = ..., - selector_class: Type[ConnectionSelector] = ..., - randomize_hosts: bool = ..., - **kwargs: Any - ) -> None: ... - def mark_dead(self, connection: Connection, now: Optional[float] = ...) -> None: ... - def mark_live(self, connection: Connection) -> None: ... - def resurrect(self, force: bool = ...) -> Optional[Connection]: ... - def get_connection(self) -> Connection: ... - def close(self) -> None: ... - def __repr__(self) -> str: ... - -class DummyConnectionPool(ConnectionPool): - def __init__( - self, connections: Sequence[Tuple[Connection, Any]], **kwargs: Any - ) -> None: ... - def get_connection(self) -> Connection: ... - def close(self) -> None: ... - def _noop(self, *args: Any, **kwargs: Any) -> Any: ... - mark_dead = mark_live = resurrect = _noop - -class EmptyConnectionPool(ConnectionPool): - def __init__(self, *_: Any, **__: Any) -> None: ... - def get_connection(self) -> Connection: ... - def _noop(self, *args: Any, **kwargs: Any) -> Any: ... - close = mark_dead = mark_live = resurrect = _noop diff --git a/opensearchpy/exceptions.py b/opensearchpy/exceptions.py index f2f994ca..58d29bdf 100644 --- a/opensearchpy/exceptions.py +++ b/opensearchpy/exceptions.py @@ -26,6 +26,8 @@ # under the License. +from typing import Any, Dict, Type, Union + __all__ = [ "ImproperlyConfigured", "OpenSearchException", @@ -76,32 +78,33 @@ class TransportError(OpenSearchException): """ @property - def status_code(self): + def status_code(self) -> Union[str, int]: """ The HTTP status code of the response that precipitated the error or ``'N/A'`` if not applicable. """ - return self.args[0] + return self.args[0] # type: ignore @property - def error(self): + def error(self) -> str: """A string error message.""" - return self.args[1] + return self.args[1] # type: ignore @property - def info(self): + def info(self) -> Union[Dict[str, Any], Exception, Any]: """ Dict of returned error info from OpenSearch, where available, underlying exception when not. 
""" return self.args[2] - def __str__(self): + def __str__(self) -> str: cause = "" try: - if self.info and "error" in self.info: - if isinstance(self.info["error"], dict): - root_cause = self.info["error"]["root_cause"][0] + if self.info and isinstance(self.info, dict) and "error" in self.info: + error = self.info["error"] + if isinstance(error, dict): + root_cause = error["root_cause"][0] cause = ", ".join( filter( None, @@ -128,7 +131,7 @@ class ConnectionError(TransportError): implementation is available as ``.info``. """ - def __str__(self): + def __str__(self) -> str: return "ConnectionError(%s) caused by: %s(%s)" % ( self.error, self.info.__class__.__name__, @@ -143,7 +146,7 @@ class SSLError(ConnectionError): class ConnectionTimeout(ConnectionError): """A network timeout. Doesn't cause a node retry by default.""" - def __str__(self): + def __str__(self) -> str: return "ConnectionTimeout caused by - %s(%s)" % ( self.info.__class__.__name__, self.info, @@ -199,7 +202,7 @@ class OpenSearchWarning(Warning): # more generic mappings from status_code to python exceptions -HTTP_EXCEPTIONS = { +HTTP_EXCEPTIONS: Dict[int, Type[OpenSearchException]] = { 400: RequestError, 401: AuthenticationException, 403: AuthorizationException, diff --git a/opensearchpy/exceptions.pyi b/opensearchpy/exceptions.pyi deleted file mode 100644 index 0ecacc6f..00000000 --- a/opensearchpy/exceptions.pyi +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Dict, Union - -class ImproperlyConfigured(Exception): ... -class OpenSearchException(Exception): ... -class SerializationError(OpenSearchException): ... - -class TransportError(OpenSearchException): - @property - def status_code(self) -> Union[str, int]: ... - @property - def error(self) -> str: ... - @property - def info(self) -> Union[Dict[str, Any], Exception, Any]: ... - def __str__(self) -> str: ... - -class ConnectionError(TransportError): - def __str__(self) -> str: ... - -class SSLError(ConnectionError): ... - -class ConnectionTimeout(ConnectionError): - def __str__(self) -> str: ... - -class NotFoundError(TransportError): ... -class ConflictError(TransportError): ... -class RequestError(TransportError): ... -class AuthenticationException(TransportError): ... -class AuthorizationException(TransportError): ... -class OpenSearchDslException(Exception): ... -class UnknownDslObject(OpenSearchDslException): ... 
-class ValidationException(ValueError, OpenSearchDslException): ... -class IllegalOperation(OpenSearchDslException): ... -class OpenSearchWarning(Warning): ... - -OpenSearchDeprecationWarning = OpenSearchWarning - -HTTP_EXCEPTIONS: Dict[int, OpenSearchException] diff --git a/opensearchpy/helpers/__init__.pyi b/opensearchpy/helpers/__init__.pyi deleted file mode 100644 index 24c0d13d..00000000 --- a/opensearchpy/helpers/__init__.pyi +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import sys - -from .._async.helpers.actions import async_bulk as async_bulk -from .._async.helpers.actions import async_reindex as async_reindex -from .._async.helpers.actions import async_scan as async_scan -from .._async.helpers.actions import async_streaming_bulk as async_streaming_bulk -from .actions import _chunk_actions as _chunk_actions -from .actions import _process_bulk_chunk as _process_bulk_chunk -from .actions import bulk as bulk -from .actions import expand_action as expand_action -from .actions import parallel_bulk as parallel_bulk -from .actions import reindex as reindex -from .actions import scan as scan -from .actions import streaming_bulk as streaming_bulk -from .asyncsigner import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth -from .errors import BulkIndexError as BulkIndexError -from .errors import ScanError as ScanError -from .signer import AWSV4SignerAuth as AWSV4SignerAuth -from .signer import RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index 587444a3..39e3cdaf 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -29,6 +29,7 @@ import logging import time from operator import methodcaller +from typing import Any, Optional from ..compat import Mapping, Queue, map, string_types from ..exceptions import TransportError @@ -37,7 +38,7 @@ logger = logging.getLogger("opensearchpy.helpers") -def expand_action(data): +def expand_action(data: Any) -> Any: """ From one document or action definition passed in by the user extract the action/data lines needed for opensearch's @@ -50,7 +51,7 @@ def expand_action(data): # make sure we don't alter the action data = data.copy() op_type = data.pop("_op_type", "index") - action = {op_type: {}} + action: Any = {op_type: {}} # If '_source' is a dict use it for source # otherwise if op_type == 'update' then @@ -105,17 +106,17 @@ def expand_action(data): class _ActionChunker: - def 
__init__(self, chunk_size, max_chunk_bytes, serializer): + def __init__(self, chunk_size: int, max_chunk_bytes: int, serializer: Any) -> None: self.chunk_size = chunk_size self.max_chunk_bytes = max_chunk_bytes self.serializer = serializer self.size = 0 self.action_count = 0 - self.bulk_actions = [] - self.bulk_data = [] + self.bulk_actions: Any = [] + self.bulk_data: Any = [] - def feed(self, action, data): + def feed(self, action: Any, data: Any) -> Any: ret = None raw_data, raw_action = data, action action = self.serializer.dumps(action) @@ -146,7 +147,7 @@ def feed(self, action, data): self.action_count += 1 return ret - def flush(self): + def flush(self) -> Any: ret = None if self.bulk_actions: ret = (self.bulk_data, self.bulk_actions) @@ -154,7 +155,9 @@ def flush(self): return ret -def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): +def _chunk_actions( + actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Any +) -> Any: """ Split actions into chunks by number or size, serialize them into strings in the process. @@ -171,7 +174,9 @@ def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): yield ret -def _process_bulk_chunk_success(resp, bulk_data, ignore_status, raise_on_error=True): +def _process_bulk_chunk_success( + resp: Any, bulk_data: Any, ignore_status: Any = (), raise_on_error: bool = True +) -> Any: # if raise on error is set, we need to collect errors per chunk before raising them errors = [] @@ -198,8 +203,12 @@ def _process_bulk_chunk_success(resp, bulk_data, ignore_status, raise_on_error=T def _process_bulk_chunk_error( - error, bulk_data, ignore_status, raise_on_exception=True, raise_on_error=True -): + error: Any, + bulk_data: Any, + ignore_status: Any = (), + raise_on_exception: bool = True, + raise_on_error: bool = True, +) -> Any: # default behavior - just propagate exception if raise_on_exception and error.status_code not in ignore_status: raise error @@ -228,15 +237,15 @@ def _process_bulk_chunk_error( def _process_bulk_chunk( - client, - bulk_actions, - bulk_data, - raise_on_exception=True, - raise_on_error=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + bulk_actions: Any, + bulk_data: Any, + raise_on_exception: bool = True, + raise_on_error: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Send a bulk request to opensearch and process the output. """ @@ -266,21 +275,21 @@ def _process_bulk_chunk( def streaming_bulk( - client, - actions, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - raise_on_error=True, - expand_action_callback=expand_action, - raise_on_exception=True, - max_retries=0, - initial_backoff=2, - max_backoff=600, - yield_ok=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + raise_on_error: bool = True, + expand_action_callback: Any = expand_action, + raise_on_exception: bool = True, + max_retries: int = 0, + initial_backoff: int = 2, + max_backoff: int = 600, + yield_ok: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Streaming bulk consumes actions from the iterable passed in and yields results per action. 
For non-streaming usecases use @@ -320,7 +329,8 @@ def streaming_bulk( actions, chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): - to_retry, to_retry_data = [], [] + to_retry: Any = [] + to_retry_data: Any = [] if attempt: time.sleep(min(max_backoff, initial_backoff * 2 ** (attempt - 1))) @@ -369,7 +379,14 @@ def streaming_bulk( bulk_actions, bulk_data = to_retry, to_retry_data -def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): +def bulk( + client: Any, + actions: Any, + stats_only: bool = False, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Helper for the :meth:`~opensearchpy.OpenSearch.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and @@ -405,9 +422,7 @@ def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True - for ok, item in streaming_bulk( - client, actions, ignore_status=ignore_status, *args, **kwargs - ): + for ok, item in streaming_bulk(client, actions, ignore_status=ignore_status, *args, **kwargs): # type: ignore # go through request-response pairs and detect failures if not ok: if not stats_only: @@ -420,17 +435,17 @@ def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): def parallel_bulk( - client, - actions, - thread_count=4, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - queue_size=4, - expand_action_callback=expand_action, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + thread_count: int = 4, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + queue_size: int = 4, + expand_action_callback: Any = expand_action, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Parallel version of the bulk helper run in multiple threads at once. @@ -457,11 +472,11 @@ def parallel_bulk( actions = map(expand_action_callback, actions) class BlockingPool(ThreadPool): - def _setup_queues(self): + def _setup_queues(self) -> None: super(BlockingPool, self)._setup_queues() # type: ignore # The queue must be at least the size of the number of threads to # prevent hanging when inserting sentinel values during teardown. 
- self._inqueue = Queue(max(queue_size, thread_count)) + self._inqueue: Any = Queue(max(queue_size, thread_count)) self._quick_put = self._inqueue.put pool = BlockingPool(thread_count) @@ -470,12 +485,7 @@ def _setup_queues(self): for result in pool.imap( lambda bulk_chunk: list( _process_bulk_chunk( - client, - bulk_chunk[1], - bulk_chunk[0], - ignore_status=ignore_status, - *args, - **kwargs + client, bulk_chunk[1], bulk_chunk[0], ignore_status, *args, **kwargs ) ), _chunk_actions( @@ -491,17 +501,17 @@ def _setup_queues(self): def scan( - client, - query=None, - scroll="5m", - raise_on_error=True, - preserve_order=False, - size=1000, - request_timeout=None, - clear_scroll=True, - scroll_kwargs=None, - **kwargs -): + client: Any, + query: Any = None, + scroll: str = "5m", + raise_on_error: bool = True, + preserve_order: bool = False, + size: int = 1000, + request_timeout: Optional[float] = None, + clear_scroll: bool = True, + scroll_kwargs: Any = None, + **kwargs: Any +) -> Any: """ Simple abstraction on top of the :meth:`~opensearchpy.OpenSearch.scroll` api - a simple iterator that @@ -609,16 +619,16 @@ def scan( def reindex( - client, - source_index, - target_index, - query=None, - target_client=None, - chunk_size=500, - scroll="5m", - scan_kwargs={}, - bulk_kwargs={}, -): + client: Any, + source_index: Any, + target_index: Any, + query: Any = None, + target_client: Any = None, + chunk_size: int = 500, + scroll: str = "5m", + scan_kwargs: Any = {}, + bulk_kwargs: Any = {}, +) -> Any: """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. @@ -652,7 +662,7 @@ def reindex( target_client = client if target_client is None else target_client docs = scan(client, query=query, index=source_index, scroll=scroll, **scan_kwargs) - def _change_doc_index(hits, index): + def _change_doc_index(hits: Any, index: Any) -> Any: for h in hits: h["_index"] = index if "fields" in h: diff --git a/opensearchpy/helpers/actions.pyi b/opensearchpy/helpers/actions.pyi deleted file mode 100644 index e1ee4254..00000000 --- a/opensearchpy/helpers/actions.pyi +++ /dev/null @@ -1,137 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import logging -import sys -from typing import ( - Any, - AsyncIterable, - Callable, - Collection, - Dict, - Generator, - Iterable, - List, - Mapping, - Optional, - Tuple, - Union, - overload, -) - -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - -from ..client import OpenSearch -from ..serializer import Serializer - -logger: logging.Logger - -def expand_action(data: Any) -> Tuple[Dict[str, Any], Optional[Any]]: ... -def _chunk_actions( - actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer -) -> Generator[Any, None, None]: ... -def _process_bulk_chunk( - client: OpenSearch, - bulk_actions: Any, - bulk_data: Any, - raise_on_exception: bool = ..., - raise_on_error: bool = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -def streaming_bulk( - client: OpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - chunk_size: int = ..., - max_chunk_bytes: int = ..., - raise_on_error: bool = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - raise_on_exception: bool = ..., - max_retries: int = ..., - initial_backoff: Union[float, int] = ..., - max_backoff: Union[float, int] = ..., - yield_ok: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -@overload -def bulk( - client: OpenSearch, - actions: Iterable[Any], - stats_only: Literal[True] = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, int]: ... -@overload -def bulk( - client: OpenSearch, - actions: Iterable[Any], - stats_only: Literal[False], - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, List[Any]]: ... -def parallel_bulk( - client: OpenSearch, - actions: Iterable[Any], - thread_count: int = ..., - chunk_size: int = ..., - max_chunk_bytes: int = ..., - queue_size: int = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -def scan( - client: OpenSearch, - query: Optional[Any] = ..., - scroll: str = ..., - raise_on_error: bool = ..., - preserve_order: bool = ..., - size: int = ..., - request_timeout: Optional[Union[float, int]] = ..., - clear_scroll: bool = ..., - scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any -) -> Generator[Any, None, None]: ... -def reindex( - client: OpenSearch, - source_index: Union[str, Collection[str]], - target_index: str, - query: Any = ..., - target_client: Optional[OpenSearch] = ..., - chunk_size: int = ..., - scroll: str = ..., - scan_kwargs: Optional[Mapping[str, Any]] = ..., - bulk_kwargs: Optional[Mapping[str, Any]] = ..., -) -> Tuple[int, Union[int, List[Any]]]: ... diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py index db7d2c28..59795614 100644 --- a/opensearchpy/helpers/aggs.py +++ b/opensearchpy/helpers/aggs.py @@ -25,16 +25,15 @@ # specific language governing permissions and limitations # under the License. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc + +import collections.abc as collections_abc +from typing import Any, Optional from .response.aggs import AggResponse, BucketData, FieldBucketData, TopHitsData from .utils import DslBase -def A(name_or_agg, filter=None, **params): +def A(name_or_agg: Any, filter: Any = None, **params: Any) -> Any: if filter is not None: if name_or_agg != "filter": raise ValueError( @@ -48,7 +47,7 @@ def A(name_or_agg, filter=None, **params): if params: raise ValueError("A() cannot accept parameters when passing in a dict.") # copy to avoid modifying in-place - agg = name_or_agg.copy() + agg = name_or_agg.copy() # type: ignore # pop out nested aggs aggs = agg.pop("aggs", None) # pop out meta data @@ -81,20 +80,20 @@ def A(name_or_agg, filter=None, **params): class Agg(DslBase): - _type_name = "agg" + _type_name: str = "agg" _type_shortcut = staticmethod(A) - name = None + name: Optional[str] = None - def __contains__(self, key): + def __contains__(self, key: Any) -> bool: return False - def to_dict(self): + def to_dict(self) -> Any: d = super(Agg, self).to_dict() if "meta" in d[self.name]: d["meta"] = d[self.name].pop("meta") return d - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return AggResponse(self, search, data) @@ -103,10 +102,10 @@ class AggBase(object): "aggs": {"type": "agg", "hash": True}, } - def __contains__(self, key): + def __contains__(self: Any, key: Any) -> bool: return key in self._params.get("aggs", {}) - def __getitem__(self, agg_name): + def __getitem__(self: Any, agg_name: Any) -> Any: agg = self._params.setdefault("aggs", {})[agg_name] # propagate KeyError # make sure we're not mutating a shared state - whenever accessing a @@ -118,13 +117,15 @@ def __getitem__(self, agg_name): return agg - def __setitem__(self, agg_name, agg): + def __setitem__(self: Any, agg_name: str, agg: Any) -> None: self.aggs[agg_name] = A(agg) - def __iter__(self): + def __iter__(self: Any) -> Any: return iter(self.aggs) - def _agg(self, bucket, name, agg_type, *args, **params): + def _agg( + self: Any, bucket: Any, name: Any, agg_type: Any, *args: Any, **params: Any + ) -> Any: agg = self[name] = A(agg_type, *args, **params) # For chaining - when creating new buckets return them... 
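The chaining behavior noted in the comment above is easiest to see from the caller's side. A minimal sketch, assuming the `Search` class this package re-exports and illustrative index/field names; `to_dict()` needs no live cluster:

```python
from opensearchpy import Search

s = Search(index="git")

# bucket() returns the newly created bucket, so a following bucket() would
# nest inside it; metric() returns the bucket it was called on, so chained
# metrics are attached side by side under "per_author".
s.aggs.bucket("per_author", "terms", field="author.name.keyword") \
    .metric("total_lines", "sum", field="stats.lines") \
    .metric("avg_files", "avg", field="stats.files")

print(s.to_dict())
```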
@@ -134,26 +135,26 @@ def _agg(self, bucket, name, agg_type, *args, **params): else: return self._base - def metric(self, name, agg_type, *args, **params): + def metric(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(False, name, agg_type, *args, **params) - def bucket(self, name, agg_type, *args, **params): + def bucket(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(True, name, agg_type, *args, **params) - def pipeline(self, name, agg_type, *args, **params): + def pipeline(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(False, name, agg_type, *args, **params) - def result(self, search, data): + def result(self: Any, search: Any, data: Any) -> Any: return BucketData(self, search, data) class Bucket(AggBase, Agg): - def __init__(self, **params): + def __init__(self, **params: Any) -> None: super(Bucket, self).__init__(**params) # remember self for chaining self._base = self - def to_dict(self): + def to_dict(self) -> Any: d = super(AggBase, self).to_dict() if "aggs" in d[self.name]: d["aggs"] = d[self.name].pop("aggs") @@ -161,18 +162,18 @@ def to_dict(self): class Filter(Bucket): - name = "filter" + name: Optional[str] = "filter" _param_defs = { "filter": {"type": "query"}, "aggs": {"type": "agg", "hash": True}, } - def __init__(self, filter=None, **params): + def __init__(self, filter: Any = None, **params: Any) -> None: if filter is not None: params["filter"] = filter super(Filter, self).__init__(**params) - def to_dict(self): + def to_dict(self) -> Any: d = super(Filter, self).to_dict() d[self.name].update(d[self.name].pop("filter", {})) return d @@ -184,7 +185,7 @@ class Pipeline(Agg): # bucket aggregations class Filters(Bucket): - name = "filters" + name: str = "filters" _param_defs = { "filters": {"type": "query", "hash": True}, "aggs": {"type": "agg", "hash": True}, @@ -202,7 +203,7 @@ class Parent(Bucket): class DateHistogram(Bucket): name = "date_histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -237,7 +238,7 @@ class Global(Bucket): class Histogram(Bucket): name = "histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -260,7 +261,7 @@ class Range(Bucket): class RareTerms(Bucket): name = "rare_terms" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -279,7 +280,7 @@ class SignificantText(Bucket): class Terms(Bucket): name = "terms" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -302,7 +303,7 @@ class Composite(Bucket): class VariableWidthHistogram(Bucket): name = "variable_width_histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -310,7 +311,7 @@ def result(self, search, data): class TopHits(Agg): name = "top_hits" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return TopHitsData(self, search, data) diff --git a/opensearchpy/helpers/aggs.pyi b/opensearchpy/helpers/aggs.pyi deleted file mode 100644 index 08b74a3a..00000000 --- a/opensearchpy/helpers/aggs.pyi +++ /dev/null @@ -1,105 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require 
contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from typing import Any - -from _typeshed import Incomplete - -from ..response.aggs import AggResponse as AggResponse -from ..response.aggs import BucketData as BucketData -from ..response.aggs import FieldBucketData as FieldBucketData -from ..response.aggs import TopHitsData as TopHitsData -from .utils import DslBase -from .utils import DslBase as DslBase - -def A(name_or_agg: Any, filter: Incomplete | None = ..., **params: Any) -> Any: ... - -class Agg(DslBase): ... -class AggBase(object): ... -class Bucket(AggBase, Agg): ... -class Filter(Bucket): ... -class Pipeline(Agg): ... -class Filters(Bucket): ... -class Children(Bucket): ... -class Parent(Bucket): ... -class DateHistogram(Bucket): ... -class AutoDateHistogram(DateHistogram): ... -class DateRange(Bucket): ... -class GeoDistance(Bucket): ... -class GeohashGrid(Bucket): ... -class GeotileGrid(Bucket): ... -class GeoCentroid(Bucket): ... -class Global(Bucket): ... -class Histogram(Bucket): ... -class IPRange(Bucket): ... -class Missing(Bucket): ... -class Nested(Bucket): ... -class Range(Bucket): ... -class RareTerms(Bucket): ... -class ReverseNested(Bucket): ... -class SignificantTerms(Bucket): ... -class SignificantText(Bucket): ... -class Terms(Bucket): ... -class Sampler(Bucket): ... -class DiversifiedSampler(Bucket): ... -class Composite(Bucket): ... -class VariableWidthHistogram(Bucket): ... -class TopHits(Agg): ... -class Avg(Agg): ... -class WeightedAvg(Agg): ... -class Cardinality(Agg): ... -class ExtendedStats(Agg): ... -class Boxplot(Agg): ... -class GeoBounds(Agg): ... -class Max(Agg): ... -class MedianAbsoluteDeviation(Agg): ... -class Min(Agg): ... -class Percentiles(Agg): ... -class PercentileRanks(Agg): ... -class ScriptedMetric(Agg): ... -class Stats(Agg): ... -class Sum(Agg): ... -class TTest(Agg): ... -class ValueCount(Agg): ... -class AvgBucket(Pipeline): ... -class BucketScript(Pipeline): ... -class BucketSelector(Pipeline): ... -class CumulativeSum(Pipeline): ... -class CumulativeCardinality(Pipeline): ... -class Derivative(Pipeline): ... -class ExtendedStatsBucket(Pipeline): ... -class Inference(Pipeline): ... -class MaxBucket(Pipeline): ... -class MinBucket(Pipeline): ... -class MovingFn(Pipeline): ... -class MovingAvg(Pipeline): ... -class MovingPercentiles(Pipeline): ... -class Normalize(Pipeline): ... -class PercentilesBucket(Pipeline): ... -class SerialDiff(Pipeline): ... -class StatsBucket(Pipeline): ... -class SumBucket(Pipeline): ... -class BucketSort(Pipeline): ... 
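Before moving on to `analysis.py`, a quick sketch of the `A()` shortcut whose annotations changed above. Its two calling conventions produce equal objects (the field name is illustrative):

```python
from opensearchpy.helpers.aggs import A

by_name = A("terms", field="category")
from_dict = A({"terms": {"field": "category"}})  # the dict is copied, not mutated

assert by_name == from_dict
assert by_name.to_dict() == {"terms": {"field": "category"}}
```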
diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py index 4e2646d7..c228acd1 100644 --- a/opensearchpy/helpers/analysis.py +++ b/opensearchpy/helpers/analysis.py @@ -25,17 +25,20 @@ # specific language governing permissions and limitations # under the License. +from typing import Any, Optional + import six from opensearchpy.connection.connections import get_connection -from opensearchpy.helpers.utils import AttrDict, DslBase, merge -__all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"] +from .utils import AttrDict, DslBase, merge class AnalysisBase(object): @classmethod - def _type_shortcut(cls, name_or_instance, type=None, **kwargs): + def _type_shortcut( + cls: Any, name_or_instance: Any, type: Any = None, **kwargs: Any + ) -> Any: if isinstance(name_or_instance, cls): if type or kwargs: raise ValueError("%s() cannot accept parameters." % cls.__name__) @@ -50,29 +53,31 @@ def _type_shortcut(cls, name_or_instance, type=None, **kwargs): class CustomAnalysis(object): - name = "custom" + name: Optional[str] = "custom" - def __init__(self, filter_name, builtin_type="custom", **kwargs): + def __init__( + self, filter_name: str, builtin_type: str = "custom", **kwargs: Any + ) -> None: self._builtin_type = builtin_type self._name = filter_name super(CustomAnalysis, self).__init__(**kwargs) - def to_dict(self): + def to_dict(self) -> Any: # only name to present in lists return self._name - def get_definition(self): - d = super(CustomAnalysis, self).to_dict() + def get_definition(self) -> Any: + d = super(CustomAnalysis, self).to_dict() # type: ignore d = d.pop(self.name) d["type"] = self._builtin_type return d class CustomAnalysisDefinition(CustomAnalysis): - def get_analysis_definition(self): + def get_analysis_definition(self: Any) -> Any: out = {self._type_name: {self._name: self.get_definition()}} - t = getattr(self, "tokenizer", None) + t: Any = getattr(self, "tokenizer", None) if "tokenizer" in self._param_defs and hasattr(t, "get_definition"): out["tokenizer"] = {t._name: t.get_definition()} @@ -103,24 +108,24 @@ def get_analysis_definition(self): class BuiltinAnalysis(object): - name = "builtin" + name: Optional[str] = "builtin" - def __init__(self, name): + def __init__(self, name: Any) -> None: self._name = name super(BuiltinAnalysis, self).__init__() - def to_dict(self): + def to_dict(self) -> Any: # only name to present in lists return self._name class Analyzer(AnalysisBase, DslBase): - _type_name = "analyzer" - name = None + _type_name: str = "analyzer" + name: Optional[str] = None class BuiltinAnalyzer(BuiltinAnalysis, Analyzer): - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: return {} @@ -131,7 +136,13 @@ class CustomAnalyzer(CustomAnalysisDefinition, Analyzer): "tokenizer": {"type": "tokenizer"}, } - def simulate(self, text, using="default", explain=False, attributes=None): + def simulate( + self, + text: Any, + using: str = "default", + explain: bool = False, + attributes: Any = None, + ) -> Any: """ Use the Analyze API of opensearch to test the outcome of this analyzer. 
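As a usage note for `simulate()`: it calls the Analyze API through a registered connection, so it needs a reachable cluster. A hedged sketch, assuming a `default` connection is registered via the `connections` helper and the host/credentials shown are placeholders:

```python
from opensearchpy.connection.connections import create_connection
from opensearchpy.helpers.analysis import analyzer

create_connection(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

my_analyzer = analyzer(
    "my_analyzer",
    tokenizer="standard",
    filter=["lowercase", "stop"],
)

# simulate() returns the raw _analyze response wrapped in an AttrDict
result = my_analyzer.simulate("The QUICK brown foxes")
print([t.token for t in result.tokens])  # -> ['quick', 'brown', 'foxes']
```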
@@ -172,12 +183,12 @@ def simulate(self, text, using="default", explain=False, attributes=None): class Normalizer(AnalysisBase, DslBase): - _type_name = "normalizer" - name = None + _type_name: str = "normalizer" + name: Optional[str] = None class BuiltinNormalizer(BuiltinAnalysis, Normalizer): - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: return {} @@ -189,8 +200,8 @@ class CustomNormalizer(CustomAnalysisDefinition, Normalizer): class Tokenizer(AnalysisBase, DslBase): - _type_name = "tokenizer" - name = None + _type_name: str = "tokenizer" + name: Optional[str] = None class BuiltinTokenizer(BuiltinAnalysis, Tokenizer): @@ -202,8 +213,8 @@ class CustomTokenizer(CustomAnalysis, Tokenizer): class TokenFilter(AnalysisBase, DslBase): - _type_name = "token_filter" - name = None + _type_name: str = "token_filter" + name: Optional[str] = None class BuiltinTokenFilter(BuiltinAnalysis, TokenFilter): @@ -217,7 +228,7 @@ class CustomTokenFilter(CustomAnalysis, TokenFilter): class MultiplexerTokenFilter(CustomTokenFilter): name = "multiplexer" - def get_definition(self): + def get_definition(self) -> Any: d = super(CustomTokenFilter, self).get_definition() if "filters" in d: @@ -230,11 +241,11 @@ def get_definition(self): ] return d - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: if not hasattr(self, "filters"): return {} - fs = {} + fs: Any = {} d = {"filter": fs} for filters in self.filters: if isinstance(filters, six.string_types): @@ -252,7 +263,7 @@ def get_analysis_definition(self): class ConditionalTokenFilter(CustomTokenFilter): name = "condition" - def get_definition(self): + def get_definition(self) -> Any: d = super(CustomTokenFilter, self).get_definition() if "filter" in d: d["filter"] = [ @@ -260,7 +271,7 @@ def get_definition(self): ] return d - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: if not hasattr(self, "filter"): return {} @@ -274,8 +285,8 @@ def get_analysis_definition(self): class CharFilter(AnalysisBase, DslBase): - _type_name = "char_filter" - name = None + _type_name: str = "char_filter" + name: Optional[str] = None class BuiltinCharFilter(BuiltinAnalysis, CharFilter): @@ -292,3 +303,5 @@ class CustomCharFilter(CustomAnalysis, CharFilter): token_filter = TokenFilter._type_shortcut char_filter = CharFilter._type_shortcut normalizer = Normalizer._type_shortcut + +__all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"] diff --git a/opensearchpy/helpers/analysis.pyi b/opensearchpy/helpers/analysis.pyi deleted file mode 100644 index 364a6ea5..00000000 --- a/opensearchpy/helpers/analysis.pyi +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import DslBase - -class AnalysisBase(object): ... -class CustomAnalysis(object): ... -class CustomAnalysisDefinition(CustomAnalysis): ... -class BuiltinAnalysis(object): ... -class Analyzer(AnalysisBase, DslBase): ... -class BuiltinAnalyzer(BuiltinAnalysis, Analyzer): ... -class CustomAnalyzer(CustomAnalysisDefinition, Analyzer): ... -class Normalizer(AnalysisBase, DslBase): ... -class BuiltinNormalizer(BuiltinAnalysis, Normalizer): ... -class CustomNormalizer(CustomAnalysisDefinition, Normalizer): ... -class Tokenizer(AnalysisBase, DslBase): ... -class BuiltinTokenizer(BuiltinAnalysis, Tokenizer): ... -class CustomTokenizer(CustomAnalysis, Tokenizer): ... -class TokenFilter(AnalysisBase, DslBase): ... -class BuiltinTokenFilter(BuiltinAnalysis, TokenFilter): ... -class CustomTokenFilter(CustomAnalysis, TokenFilter): ... -class MultiplexerTokenFilter(CustomTokenFilter): ... -class ConditionalTokenFilter(CustomTokenFilter): ... -class CharFilter(AnalysisBase, DslBase): ... -class BuiltinCharFilter(BuiltinAnalysis, CharFilter): ... -class CustomCharFilter(CustomAnalysis, CharFilter): ... diff --git a/opensearchpy/helpers/asyncsigner.py b/opensearchpy/helpers/asyncsigner.py index 7f063c9f..bd84e09e 100644 --- a/opensearchpy/helpers/asyncsigner.py +++ b/opensearchpy/helpers/asyncsigner.py @@ -8,9 +8,7 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -import sys - -PY3 = sys.version_info[0] == 3 +from typing import Dict, Union class AWSV4SignerAsyncAuth: @@ -18,7 +16,7 @@ class AWSV4SignerAsyncAuth: AWS V4 Request Signer for Async Requests. """ - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region: str, service: str = "es") -> None: # type: ignore if not credentials: raise ValueError("Credentials cannot be empty") self.credentials = credentials @@ -31,10 +29,14 @@ def __init__(self, credentials, region, service="es"): # type: ignore raise ValueError("Service name cannot be empty") self.service = service - def __call__(self, method, url, query_string, body): # type: ignore - return self._sign_request(method, url, query_string, body) # type: ignore + def __call__( + self, method: str, url: str, query_string: str, body: Union[str, bytes] + ) -> Dict[str, str]: + return self._sign_request(method, url, query_string, body) - def _sign_request(self, method, url, query_string, body): + def _sign_request( + self, method: str, url: str, query_string: str, body: Union[str, bytes] + ) -> Dict[str, str]: """ This method helps in signing the request by injecting the required headers. :param prepared_request: unsigned headers diff --git a/opensearchpy/helpers/asyncsigner.pyi b/opensearchpy/helpers/asyncsigner.pyi deleted file mode 100644 index e0b5a7b5..00000000 --- a/opensearchpy/helpers/asyncsigner.pyi +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. 
-# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Dict, List - -class AWSV4SignerAsyncAuth: - @property - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - @property - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - @property - def _sign_request(self, *args: Any, **kwargs: Any) -> Dict[str, List[str]]: ... diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index de9891bc..f1673ce7 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -25,12 +25,9 @@ # specific language governing permissions and limitations # under the License. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from fnmatch import fnmatch +from typing import Any, Tuple, Type from six import add_metaclass, iteritems @@ -46,12 +43,17 @@ class MetaField(object): - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self.args, self.kwargs = args, kwargs class DocumentMeta(type): - def __new__(cls, name, bases, attrs): + def __new__( + cls: Any, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: # DocumentMeta filters attrs in place attrs["_doc_type"] = DocumentOptions(name, bases, attrs) return super(DocumentMeta, cls).__new__(cls, name, bases, attrs) @@ -62,7 +64,12 @@ class IndexMeta(DocumentMeta): # class, only user defined subclasses should have an _index attr _document_initialized = False - def __new__(cls, name, bases, attrs): + def __new__( + cls: Any, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: new_cls = super(IndexMeta, cls).__new__(cls, name, bases, attrs) if cls._document_initialized: index_opts = attrs.pop("Index", None) @@ -73,7 +80,7 @@ def __new__(cls, name, bases, attrs): return new_cls @classmethod - def construct_index(cls, opts, bases): + def construct_index(cls, opts: Any, bases: Any) -> Any: if opts is None: for b in bases: if hasattr(b, "_index"): @@ -91,7 +98,12 @@ def construct_index(cls, opts, bases): class DocumentOptions(object): - def __init__(self, name, bases, attrs): + def __init__( + self, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> None: meta = attrs.pop("Meta", None) # create the mapping instance @@ -115,7 +127,7 @@ def __init__(self, name, bases, attrs): self.mapping.update(b._doc_type.mapping, update_only=True) @property - def name(self): + def name(self) -> Any: return self.mapping.properties.name @@ -126,7 +138,7 @@ class InnerDoc(ObjectBase): """ @classmethod - def from_opensearch(cls, data, data_only=False): + def from_opensearch(cls, data: Any, data_only: bool = False) -> Any: if data_only: data = {"_source": data} return super(InnerDoc, cls).from_opensearch(data) @@ -139,25 +151,25 @@ class Document(ObjectBase): """ @classmethod - def _matches(cls, hit): + def _matches(cls: Any, hit: Any) -> Any: if cls._index._name is None: return True return fnmatch(hit.get("_index", ""), cls._index._name) @classmethod - def _get_using(cls, using=None): + def _get_using(cls: Any, using: Any = None) -> Any: return using or cls._index._using @classmethod - def _get_connection(cls, using=None): + def _get_connection(cls, using: Any = None) -> Any: return get_connection(cls._get_using(using)) @classmethod - def _default_index(cls, index=None): + def _default_index(cls: Any, index: Any = None) -> Any: return 
index or cls._index._name @classmethod - def init(cls, index=None, using=None): + def init(cls: Any, index: Any = None, using: Any = None) -> None: """ Create the index and populate the mappings in opensearch. """ @@ -166,7 +178,7 @@ def init(cls, index=None, using=None): i = i.clone(name=index) i.save(using=using) - def _get_index(self, index=None, required=True): + def _get_index(self, index: Any = None, required: bool = True) -> Any: if index is None: index = getattr(self.meta, "index", None) if index is None: @@ -177,7 +189,7 @@ def _get_index(self, index=None, required=True): raise ValidationException("You cannot write to a wildcard index.") return index - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( @@ -188,7 +200,7 @@ def __repr__(self): ) @classmethod - def search(cls, using=None, index=None): + def search(cls, using: Any = None, index: Any = None) -> Any: """ Create an :class:`~opensearchpy.Search` instance that will search over this ``Document``. @@ -198,7 +210,7 @@ def search(cls, using=None, index=None): ) @classmethod - def get(cls, id, using=None, index=None, **kwargs): + def get(cls: Any, id: Any, using: Any = None, index: Any = None, **kwargs: Any) -> Any: # type: ignore """ Retrieve a single document from opensearch using its ``id``. @@ -217,7 +229,9 @@ def get(cls, id, using=None, index=None, **kwargs): return cls.from_opensearch(doc) @classmethod - def exists(cls, id, using=None, index=None, **kwargs): + def exists( + cls, id: Any, using: Any = None, index: Any = None, **kwargs: Any + ) -> Any: """ check if exists a single document from opensearch using its ``id``. @@ -234,13 +248,19 @@ def exists(cls, id, using=None, index=None, **kwargs): @classmethod def mget( - cls, docs, using=None, index=None, raise_on_error=True, missing="none", **kwargs - ): - r""" - Retrieve multiple document by their ``id``\s. Returns a list of instances + cls, + docs: Any, + using: Any = None, + index: Any = None, + raise_on_error: bool = True, + missing: str = "none", + **kwargs: Any, + ) -> Any: + """ + Retrieve multiple document by their ``id``'s. Returns a list of instances in the same order as requested. - :arg docs: list of ``id``\s of the documents to be retrieved or a list + :arg docs: list of ``id``'s of the documents to be retrieved or a list of document specifications as per https://opensearch.org/docs/latest/opensearch/rest-api/document-apis/multi-get/ :arg index: opensearch index to use, if the ``Document`` is @@ -264,7 +284,9 @@ def mget( } results = opensearch.mget(body, index=cls._default_index(index), **kwargs) - objs, error_docs, missing_docs = [], [], [] + objs: Any = [] + error_docs: Any = [] + missing_docs: Any = [] for doc in results["docs"]: if doc.get("found"): if error_docs or missing_docs: @@ -297,7 +319,7 @@ def mget( raise NotFoundError(404, message, {"docs": missing_docs}) return objs - def delete(self, using=None, index=None, **kwargs): + def delete(self, using: Any = None, index: Any = None, **kwargs: Any) -> Any: """ Delete the instance in opensearch. @@ -320,7 +342,7 @@ def delete(self, using=None, index=None, **kwargs): doc_meta.update(kwargs) opensearch.delete(index=self._get_index(index), **doc_meta) - def to_dict(self, include_meta=False, skip_empty=True): + def to_dict(self, include_meta: bool = False, skip_empty: bool = True) -> Any: # type: ignore """ Serialize the instance into a dictionary so that it can be saved in opensearch. 
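The classmethods annotated above combine into the usual persistence round trip. A minimal sketch, assuming a `default` connection; the index name, field, and id are illustrative:

```python
from opensearchpy.connection.connections import create_connection
from opensearchpy.helpers.document import Document
from opensearchpy.helpers.field import Text

create_connection(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

class Post(Document):
    title = Text()

    class Index:
        name = "posts"

Post.init()                                    # create the index and mapping
Post(meta={"id": 42}, title="Typed helpers").save()
post = Post.get(id=42)
print(post.to_dict(include_meta=True))         # source plus _index/_id metadata
```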
@@ -348,19 +370,19 @@ def to_dict(self, include_meta=False, skip_empty=True): def update( self, - using=None, - index=None, - detect_noop=True, - doc_as_upsert=False, - refresh=False, - retry_on_conflict=None, - script=None, - script_id=None, - scripted_upsert=False, - upsert=None, - return_doc_meta=False, - **fields - ): + using: Any = None, + index: Any = None, + detect_noop: bool = True, + doc_as_upsert: bool = False, + refresh: bool = False, + retry_on_conflict: Any = None, + script: Any = None, + script_id: Any = None, + scripted_upsert: bool = False, + upsert: Any = None, + return_doc_meta: bool = False, + **fields: Any, + ) -> Any: """ Partial update of the document, specify fields you wish to update and both the instance and the document in opensearch will be updated:: @@ -389,7 +411,7 @@ def update( :return operation result noop/updated """ - body = { + body: Any = { "doc_as_upsert": doc_as_upsert, "detect_noop": detect_noop, } @@ -453,13 +475,13 @@ def update( def save( self, - using=None, - index=None, - validate=True, - skip_empty=True, - return_doc_meta=False, - **kwargs - ): + using: Any = None, + index: Any = None, + validate: bool = True, + skip_empty: bool = True, + return_doc_meta: bool = False, + **kwargs: Any, + ) -> Any: """ Save the document into opensearch. If the document doesn't exist it is created, it is overwritten otherwise. Returns ``True`` if this @@ -496,7 +518,7 @@ def save( meta = opensearch.index( index=self._get_index(index), body=self.to_dict(skip_empty=skip_empty), - **doc_meta + **doc_meta, ) # update meta information from OpenSearch for k in META_FIELDS: diff --git a/opensearchpy/helpers/document.pyi b/opensearchpy/helpers/document.pyi deleted file mode 100644 index 89ca6426..00000000 --- a/opensearchpy/helpers/document.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# THIS FILE IS AUTOMATICALLY GENERATED, DO NOT EDIT. - -from .utils import ObjectBase - -class MetaField(object): ... -class DocumentMeta(type): ... -class IndexMeta(DocumentMeta): ... -class DocumentOptions(object): ... -class InnerDoc(ObjectBase): ... -class Document(ObjectBase): ... diff --git a/opensearchpy/helpers/errors.py b/opensearchpy/helpers/errors.py index 5d05bd23..220b6b31 100644 --- a/opensearchpy/helpers/errors.py +++ b/opensearchpy/helpers/errors.py @@ -26,17 +26,21 @@ # under the License. 
+from typing import Any, List + from ..exceptions import OpenSearchException class BulkIndexError(OpenSearchException): @property - def errors(self): + def errors(self) -> List[Any]: """List of errors from execution of the last chunk.""" - return self.args[1] + return self.args[1] # type: ignore class ScanError(OpenSearchException): - def __init__(self, scroll_id, *args, **kwargs): - super(ScanError, self).__init__(*args, **kwargs) # type: ignore + scroll_id: str + + def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: + super(ScanError, self).__init__(*args, **kwargs) self.scroll_id = scroll_id diff --git a/opensearchpy/helpers/errors.pyi b/opensearchpy/helpers/errors.pyi deleted file mode 100644 index 9572d68f..00000000 --- a/opensearchpy/helpers/errors.pyi +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, List - -from ..exceptions import OpenSearchException - -class BulkIndexError(OpenSearchException): - @property - def errors(self) -> List[Any]: ... - -class ScanError(OpenSearchException): - scroll_id: str - def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: ... diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py index e1bf9c0e..e9ae14ef 100644 --- a/opensearchpy/helpers/faceted_search.py +++ b/opensearchpy/helpers/faceted_search.py @@ -26,6 +26,7 @@ # under the License. from datetime import datetime, timedelta +from typing import Any, Optional from six import iteritems, itervalues @@ -53,16 +54,18 @@ class Facet(object): from the result of the aggregation. """ - agg_type = None + agg_type: Optional[str] = None - def __init__(self, metric=None, metric_sort="desc", **kwargs): + def __init__( + self, metric: Any = None, metric_sort: str = "desc", **kwargs: Any + ) -> None: self.filter_values = () self._params = kwargs self._metric = metric if metric and metric_sort: self._params["order"] = {"metric": metric_sort} - def get_aggregation(self): + def get_aggregation(self) -> Any: """ Return the aggregation object. """ @@ -71,7 +74,7 @@ def get_aggregation(self): agg.metric("metric", self._metric) return agg - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: """ Construct a filter. 
""" @@ -83,25 +86,25 @@ def add_filter(self, filter_values): f |= self.get_value_filter(v) return f - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: """ Construct a filter for an individual value """ pass - def is_filtered(self, key, filter_values): + def is_filtered(self, key: Any, filter_values: Any) -> bool: """ Is a filter active on the given key. """ return key in filter_values - def get_value(self, bucket): + def get_value(self, bucket: Any) -> Any: """ return a value representing a bucket. Its key as default. """ return bucket["key"] - def get_metric(self, bucket): + def get_metric(self, bucket: Any) -> Any: """ Return a metric, by default doc_count for a bucket. """ @@ -109,7 +112,7 @@ def get_metric(self, bucket): return bucket["metric"]["value"] return bucket["doc_count"] - def get_values(self, data, filter_values): + def get_values(self, data: Any, filter_values: Any) -> Any: """ Turn the raw bucket data into a list of tuples containing the key, number of documents and a flag indicating whether this value has been @@ -125,9 +128,9 @@ def get_values(self, data, filter_values): class TermsFacet(Facet): - agg_type = "terms" + agg_type: Optional[str] = "terms" - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: """Create a terms filter instead of bool containing term filters.""" if filter_values: return Terms( @@ -138,7 +141,7 @@ def add_filter(self, filter_values): class RangeFacet(Facet): agg_type = "range" - def _range_to_dict(self, range): + def _range_to_dict(self, range: Any) -> Any: key, range = range out = {"key": key} if range[0] is not None: @@ -147,13 +150,13 @@ def _range_to_dict(self, range): out["to"] = range[1] return out - def __init__(self, ranges, **kwargs): + def __init__(self, ranges: Any, **kwargs: Any) -> None: super(RangeFacet, self).__init__(**kwargs) self._params["ranges"] = list(map(self._range_to_dict, ranges)) self._params["keyed"] = False self._ranges = dict(ranges) - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: f, t = self._ranges[filter_value] limits = {} if f is not None: @@ -167,7 +170,7 @@ def get_value_filter(self, filter_value): class HistogramFacet(Facet): agg_type = "histogram" - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: return Range( _expand__to_dot=False, **{ @@ -179,25 +182,25 @@ def get_value_filter(self, filter_value): ) -def _date_interval_year(d): +def _date_interval_year(d: Any) -> Any: return d.replace( year=d.year + 1, day=(28 if d.month == 2 and d.day == 29 else d.day) ) -def _date_interval_month(d): +def _date_interval_month(d: Any) -> Any: return (d + timedelta(days=32)).replace(day=1) -def _date_interval_week(d): +def _date_interval_week(d: Any) -> Any: return d + timedelta(days=7) -def _date_interval_day(d): +def _date_interval_day(d: Any) -> Any: return d + timedelta(days=1) -def _date_interval_hour(d): +def _date_interval_hour(d: Any) -> Any: return d + timedelta(hours=1) @@ -217,22 +220,22 @@ class DateHistogramFacet(Facet): "1h": _date_interval_hour, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: kwargs.setdefault("min_doc_count", 0) super(DateHistogramFacet, self).__init__(**kwargs) - def get_value(self, bucket): + def get_value(self, bucket: Any) -> Any: if not isinstance(bucket["key"], datetime): # OpenSearch returns key=None instead of 0 for date 1970-01-01, # so we need to set key to 0 to avoid 
TypeError exception if bucket["key"] is None: bucket["key"] = 0 # Preserve milliseconds in the datetime - return datetime.utcfromtimestamp(int(bucket["key"]) / 1000.0) + return datetime.utcfromtimestamp(int(bucket["key"]) / 1000.0) # type: ignore else: return bucket["key"] - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: for interval_type in ("calendar_interval", "fixed_interval"): if interval_type in self._params: break @@ -255,17 +258,17 @@ def get_value_filter(self, filter_value): class NestedFacet(Facet): agg_type = "nested" - def __init__(self, path, nested_facet): + def __init__(self, path: Any, nested_facet: Any) -> None: self._path = path self._inner = nested_facet super(NestedFacet, self).__init__( path=path, aggs={"inner": nested_facet.get_aggregation()} ) - def get_values(self, data, filter_values): + def get_values(self, data: Any, filter_values: Any) -> Any: return self._inner.get_values(data.inner, filter_values) - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: inner_q = self._inner.add_filter(filter_values) if inner_q: return Nested(path=self._path, query=inner_q) @@ -273,11 +276,11 @@ def add_filter(self, filter_values): class FacetedResponse(Response): @property - def query_string(self): + def query_string(self) -> Any: return self._faceted_search._query @property - def facets(self): + def facets(self) -> Any: if not hasattr(self, "_facets"): super(AttrDict, self).__setattr__("_facets", AttrDict({})) for name, facet in iteritems(self._faceted_search.facets): @@ -330,38 +333,38 @@ def search(self): """ - index = None - doc_types = None - fields = None - facets = {} + index: Any = None + doc_types: Any = None + fields: Any = None + facets: Any = {} using = "default" - def __init__(self, query=None, filters={}, sort=()): + def __init__(self, query: Any = None, filters: Any = {}, sort: Any = ()) -> None: """ :arg query: the text to search for :arg filters: facet values to filter :arg sort: sort information to be passed to :class:`~opensearchpy.Search` """ self._query = query - self._filters = {} + self._filters: Any = {} self._sort = sort - self.filter_values = {} + self.filter_values: Any = {} for name, value in iteritems(filters): self.add_filter(name, value) self._s = self.build_search() - def count(self): + def count(self) -> Any: return self._s.count() - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: self._s = self._s[k] return self - def __iter__(self): + def __iter__(self) -> Any: return iter(self._s) - def add_filter(self, name, filter_values): + def add_filter(self, name: Any, filter_values: Any) -> Any: """ Add a filter for a facet. """ @@ -383,7 +386,7 @@ def add_filter(self, name, filter_values): self._filters[name] = f - def search(self): + def search(self) -> Any: """ Returns the base Search object to which the facets are added. @@ -393,7 +396,7 @@ def search(self): s = Search(doc_type=self.doc_types, index=self.index, using=self.using) return s.response_class(FacetedResponse) - def query(self, search, query): + def query(self, search: Any, query: Any) -> Any: """ Add query part to ``search``. @@ -406,7 +409,7 @@ def query(self, search, query): return search.query("multi_match", query=query) return search - def aggregate(self, search): + def aggregate(self, search: Any) -> Any: """ Add aggregations representing the facets selected, including potential filters. 
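To ground `aggregate()` and the other hook methods, a hedged end-to-end sketch of a `FacetedSearch` subclass; the index, fields, facet, and connection settings are illustrative. Each facet in the response iterates as `(value, count, is_filtered)` tuples, per `get_values()` above:

```python
from opensearchpy.connection.connections import create_connection
from opensearchpy.helpers.faceted_search import FacetedSearch, TermsFacet

create_connection(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

class PostSearch(FacetedSearch):
    index = "posts"
    fields = ["title^2", "body"]                 # fed into query() as multi_match
    facets = {"tags": TermsFacet(field="tags")}

ps = PostSearch("python", filters={"tags": "help"})
response = ps.execute()

for hit in response:
    print(hit.meta.score, hit.title)
for tag, count, selected in response.facets.tags:
    print(tag, count, selected)
```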
@@ -422,7 +425,7 @@ def aggregate(self, search): f, agg ) - def filter(self, search): + def filter(self, search: Any) -> Any: """ Add a ``post_filter`` to the search request narrowing the results based on the facet filters. @@ -435,7 +438,7 @@ def filter(self, search): post_filter &= f return search.post_filter(post_filter) - def highlight(self, search): + def highlight(self, search: Any) -> Any: """ Add highlighting for all the fields """ @@ -443,7 +446,7 @@ def highlight(self, search): *(f if "^" not in f else f.split("^", 1)[0] for f in self.fields) ) - def sort(self, search): + def sort(self, search: Any) -> Any: """ Add sorting information to the request. """ @@ -451,7 +454,7 @@ def sort(self, search): search = search.sort(*self._sort) return search - def build_search(self): + def build_search(self) -> Any: """ Construct the ``Search`` object. """ @@ -464,7 +467,7 @@ def build_search(self): self.aggregate(s) return s - def execute(self): + def execute(self) -> Any: """ Execute the search and return the response. """ diff --git a/opensearchpy/helpers/faceted_search.pyi b/opensearchpy/helpers/faceted_search.pyi deleted file mode 100644 index d3ff998d..00000000 --- a/opensearchpy/helpers/faceted_search.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from opensearchpy.helpers.response import Response - -class Facet(object): ... -class TermsFacet(Facet): ... -class RangeFacet(Facet): ... -class HistogramFacet(Facet): ... -class DateHistogramFacet(Facet): ... -class NestedFacet(Facet): ... -class FacetedResponse(Response): ... -class FacetedSearch(object): ... diff --git a/opensearchpy/helpers/field.py b/opensearchpy/helpers/field.py index edeaecf0..4881e819 100644 --- a/opensearchpy/helpers/field.py +++ b/opensearchpy/helpers/field.py @@ -26,15 +26,11 @@ # under the License. 
import base64 +import collections.abc as collections_abc import copy import ipaddress - -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - from datetime import date, datetime +from typing import Any, Optional, Type from dateutil import parser, tz from six import integer_types, iteritems, string_types @@ -45,17 +41,17 @@ from .utils import AttrDict, AttrList, DslBase from .wrappers import Range -unicode = type("") +unicode: Type[str] = type("") -def construct_field(name_or_field, **params): +def construct_field(name_or_field: Any, **params: Any) -> Any: # {"type": "text", "analyzer": "snowball"} if isinstance(name_or_field, collections_abc.Mapping): if params: raise ValueError( "construct_field() cannot accept parameters when passing in a dict." ) - params = name_or_field.copy() + params = name_or_field.copy() # type: ignore if "type" not in params: # inner object can be implicitly defined if "properties" in params: @@ -80,14 +76,16 @@ def construct_field(name_or_field, **params): class Field(DslBase): - _type_name = "field" + _type_name: str = "field" _type_shortcut = staticmethod(construct_field) # all fields can be multifields _param_defs = {"fields": {"type": "field", "hash": True}} - name = None - _coerce = False + name: Optional[str] = None + _coerce: bool = False - def __init__(self, multi=False, required=False, *args, **kwargs): + def __init__( + self, multi: bool = False, required: bool = False, *args: Any, **kwargs: Any + ) -> None: """ :arg bool multi: specifies whether field can contain array of values :arg bool required: specifies whether field is required @@ -96,29 +94,29 @@ def __init__(self, multi=False, required=False, *args, **kwargs): self._required = required super(Field, self).__init__(*args, **kwargs) - def __getitem__(self, subfield): + def __getitem__(self, subfield: Any) -> Any: return self._params.get("fields", {})[subfield] - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: return data - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return data - def _empty(self): + def _empty(self) -> None: return None - def empty(self): + def empty(self) -> Any: if self._multi: return AttrList([]) return self._empty() - def serialize(self, data): + def serialize(self, data: Any) -> Any: if isinstance(data, (list, AttrList, tuple)): return list(map(self._serialize, data)) return self._serialize(data) - def deserialize(self, data): + def deserialize(self, data: Any) -> Any: if isinstance(data, (list, AttrList, tuple)): data = [None if d is None else self._deserialize(d) for d in data] return data @@ -126,14 +124,14 @@ def deserialize(self, data): return None return self._deserialize(data) - def clean(self, data): + def clean(self, data: Any) -> Any: if data is not None: data = self.deserialize(data) if data in (None, [], {}) and self._required: raise ValidationException("Value required for this field.") return data - def to_dict(self): + def to_dict(self) -> Any: d = super(Field, self).to_dict() name, value = d.popitem() value["type"] = name @@ -144,7 +142,7 @@ class CustomField(Field): name = "custom" _coerce = True - def to_dict(self): + def to_dict(self) -> Any: if isinstance(self.builtin_type, Field): return self.builtin_type.to_dict() @@ -154,10 +152,16 @@ def to_dict(self): class Object(Field): - name = "object" - _coerce = True - - def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): + name: Optional[str] = 
"object" + _coerce: bool = True + + def __init__( + self, + doc_class: Any = None, + dynamic: Any = None, + properties: Any = None, + **kwargs: Any + ) -> None: """ :arg document.InnerDoc doc_class: base doc class that handles mapping. If no `doc_class` is provided, new instance of `InnerDoc` will be created, @@ -173,7 +177,7 @@ def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): "doc_class and properties/dynamic should not be provided together" ) if doc_class: - self._doc_class = doc_class + self._doc_class: Any = doc_class else: # FIXME import from opensearchpy.helpers.document import InnerDoc @@ -181,39 +185,39 @@ def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): # no InnerDoc subclass, creating one instead... self._doc_class = type("InnerDoc", (InnerDoc,), {}) for name, field in iteritems(properties or {}): - self._doc_class._doc_type.mapping.field(name, field) + self._doc_class._doc_type.mapping.field(name, field) # type: ignore if dynamic is not None: - self._doc_class._doc_type.mapping.meta("dynamic", dynamic) + self._doc_class._doc_type.mapping.meta("dynamic", dynamic) # type: ignore self._mapping = copy.deepcopy(self._doc_class._doc_type.mapping) super(Object, self).__init__(**kwargs) - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self._mapping[name] - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self._mapping - def _empty(self): + def _empty(self) -> Any: return self._wrap({}) - def _wrap(self, data): + def _wrap(self, data: Any) -> Any: return self._doc_class.from_opensearch(data, data_only=True) - def empty(self): + def empty(self) -> Any: if self._multi: return AttrList([], self._wrap) return self._empty() - def to_dict(self): + def to_dict(self) -> Any: d = self._mapping.to_dict() d.update(super(Object, self).to_dict()) return d - def _collect_fields(self): + def _collect_fields(self) -> Any: return self._mapping.properties._collect_fields() - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: # don't wrap already wrapped data if isinstance(data, self._doc_class): return data @@ -223,7 +227,7 @@ def _deserialize(self, data): return self._wrap(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None @@ -233,7 +237,7 @@ def _serialize(self, data): return data.to_dict() - def clean(self, data): + def clean(self, data: Any) -> Any: data = super(Object, self).clean(data) if data is None: return None @@ -244,7 +248,7 @@ def clean(self, data): data.full_clean() return data - def update(self, other, update_only=False): + def update(self, other: "Object", update_only: bool = False) -> None: if not isinstance(other, Object): # not an inner/nested object, no merge possible return @@ -253,18 +257,20 @@ def update(self, other, update_only=False): class Nested(Object): - name = "nested" + name: Optional[str] = "nested" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: kwargs.setdefault("multi", True) super(Nested, self).__init__(*args, **kwargs) class Date(Field): - name = "date" - _coerce = True + name: Optional[str] = "date" + _coerce: bool = True - def __init__(self, default_timezone=None, *args, **kwargs): + def __init__( + self, default_timezone: None = None, *args: Any, **kwargs: Any + ) -> None: """ :arg default_timezone: timezone that will be automatically used for tz-naive values May be instance of `datetime.tzinfo` or string 
containing TZ offset @@ -274,7 +280,7 @@ def __init__(self, default_timezone=None, *args, **kwargs): self._default_timezone = tz.gettz(self._default_timezone) super(Date, self).__init__(*args, **kwargs) - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, string_types): try: data = parser.parse(data) @@ -303,7 +309,7 @@ class Text(Field): "search_analyzer": {"type": "analyzer"}, "search_quote_analyzer": {"type": "analyzer"}, } - name = "text" + name: Optional[str] = "text" class SearchAsYouType(Field): @@ -312,7 +318,7 @@ class SearchAsYouType(Field): "search_analyzer": {"type": "analyzer"}, "search_quote_analyzer": {"type": "analyzer"}, } - name = "search_as_you_type" + name: Optional[str] = "search_as_you_type" class Keyword(Field): @@ -321,23 +327,23 @@ class Keyword(Field): "search_analyzer": {"type": "analyzer"}, "normalizer": {"type": "normalizer"}, } - name = "keyword" + name: Optional[str] = "keyword" class ConstantKeyword(Keyword): - name = "constant_keyword" + name: Optional[str] = "constant_keyword" class Boolean(Field): - name = "boolean" - _coerce = True + name: Optional[str] = "boolean" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if data == "false": return False return bool(data) - def clean(self, data): + def clean(self, data: Any) -> Any: if data is not None: data = self.deserialize(data) if data is None and self._required: @@ -346,108 +352,108 @@ def clean(self, data): class Float(Field): - name = "float" - _coerce = True + name: Optional[str] = "float" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return float(data) class DenseVector(Float): - name = "dense_vector" + name: Optional[str] = "dense_vector" - def __init__(self, dims, **kwargs): + def __init__(self, dims: Any, **kwargs: Any) -> None: kwargs["multi"] = True super(DenseVector, self).__init__(dims=dims, **kwargs) class SparseVector(Field): - name = "sparse_vector" + name: Optional[str] = "sparse_vector" class HalfFloat(Float): - name = "half_float" + name: Optional[str] = "half_float" class ScaledFloat(Float): - name = "scaled_float" + name: Optional[str] = "scaled_float" - def __init__(self, scaling_factor, *args, **kwargs): + def __init__(self, scaling_factor: Any, *args: Any, **kwargs: Any) -> None: super(ScaledFloat, self).__init__( scaling_factor=scaling_factor, *args, **kwargs ) class Double(Float): - name = "double" + name: Optional[str] = "double" class RankFeature(Float): - name = "rank_feature" + name: Optional[str] = "rank_feature" class RankFeatures(Field): - name = "rank_features" + name: Optional[str] = "rank_features" class Integer(Field): - name = "integer" - _coerce = True + name: Optional[str] = "integer" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return int(data) class Byte(Integer): - name = "byte" + name: Optional[str] = "byte" class Short(Integer): - name = "short" + name: Optional[str] = "short" class Long(Integer): - name = "long" + name: Optional[str] = "long" class Ip(Field): - name = "ip" - _coerce = True + name: Optional[str] = "ip" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: # the ipaddress library for pypy only accepts unicode. 
return ipaddress.ip_address(unicode(data)) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return str(data) class Binary(Field): - name = "binary" - _coerce = True + name: Optional[str] = "binary" + _coerce: bool = True - def clean(self, data): + def clean(self, data: Any) -> Any: # Binary fields are opaque, so there's not much cleaning # that can be done. return data - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return base64.b64decode(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return base64.b64encode(data).decode() class GeoPoint(Field): - name = "geo_point" + name: Optional[str] = "geo_point" class GeoShape(Field): - name = "geo_shape" + name: Optional[str] = "geo_shape" class Completion(Field): @@ -459,29 +465,29 @@ class Completion(Field): class Percolator(Field): - name = "percolator" - _coerce = True + name: Optional[str] = "percolator" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return Q(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return data.to_dict() class RangeField(Field): - _coerce = True - _core_field = None + _coerce: bool = True + _core_field: Any = None - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, Range): return data data = dict((k, self._core_field.deserialize(v)) for k, v in iteritems(data)) return Range(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None if not isinstance(data, collections_abc.Mapping): @@ -490,42 +496,42 @@ def _serialize(self, data): class IntegerRange(RangeField): - name = "integer_range" - _core_field = Integer() + name: Optional[str] = "integer_range" + _core_field: Any = Integer() class FloatRange(RangeField): - name = "float_range" - _core_field = Float() + name: Optional[str] = "float_range" + _core_field: Any = Float() class LongRange(RangeField): - name = "long_range" - _core_field = Long() + name: Optional[str] = "long_range" + _core_field: Any = Long() class DoubleRange(RangeField): - name = "double_range" - _core_field = Double() + name: Optional[str] = "double_range" + _core_field: Any = Double() class DateRange(RangeField): - name = "date_range" - _core_field = Date() + name: Optional[str] = "date_range" + _core_field: Any = Date() class IpRange(Field): # not a RangeField since ip_range supports CIDR ranges - name = "ip_range" + name: Optional[str] = "ip_range" class Join(Field): - name = "join" + name: Optional[str] = "join" class TokenCount(Field): - name = "token_count" + name: Optional[str] = "token_count" class Murmur3(Field): - name = "murmur3" + name: Optional[str] = "murmur3" diff --git a/opensearchpy/helpers/field.pyi b/opensearchpy/helpers/field.pyi deleted file mode 100644 index 3b448641..00000000 --- a/opensearchpy/helpers/field.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
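The `Binary` and `Ip` coercions in this hunk are easy to sanity-check in isolation, since `serialize()`/`deserialize()` need no cluster; a quick round-trip under the import path shown in the diff:

```python
# Round-trip the Binary and Ip field coercions from this hunk.
import ipaddress

from opensearchpy.helpers.field import Binary, Ip

b = Binary()
wire = b.serialize(b"\x00\x01payload")   # base64-encoded str for the JSON body
assert b.deserialize(wire) == b"\x00\x01payload"

ip = Ip()
addr = ip.deserialize("127.0.0.1")       # ipaddress.IPv4Address
assert isinstance(addr, ipaddress.IPv4Address)
assert ip.serialize(addr) == "127.0.0.1"
```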
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class Field(DslBase): ... -class CustomField(Field): ... -class Object(Field): ... -class Nested(Object): ... -class Date(Field): ... -class Text(Field): ... -class SearchAsYouType(Field): ... -class Keyword(Field): ... -class ConstantKeyword(Keyword): ... -class Boolean(Field): ... -class Float(Field): ... -class DenseVector(Float): ... -class SparseVector(Field): ... -class HalfFloat(Float): ... -class ScaledFloat(Float): ... -class Double(Float): ... -class RankFeature(Float): ... -class RankFeatures(Field): ... -class Integer(Field): ... -class Byte(Integer): ... -class Short(Integer): ... -class Long(Integer): ... -class Ip(Field): ... -class Binary(Field): ... -class GeoPoint(Field): ... -class GeoShape(Field): ... -class Completion(Field): ... -class Percolator(Field): ... -class RangeField(Field): ... -class IntegerRange(RangeField): ... -class FloatRange(RangeField): ... -class LongRange(RangeField): ... -class DoubleRange(RangeField): ... -class DateRange(RangeField): ... -class IpRange(Field): ... -class Join(Field): ... -class TokenCount(Field): ... -class Murmur3(Field): ... - -def construct_field(name_or_field: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py index 47b7b148..00452f86 100644 --- a/opensearchpy/helpers/function.py +++ b/opensearchpy/helpers/function.py @@ -25,21 +25,19 @@ # specific language governing permissions and limitations # under the License. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +import collections.abc as collections_abc +from typing import Any, Optional from .utils import DslBase -def SF(name_or_sf, **params): +def SF(name_or_sf: Any, **params: Any) -> Any: # {"script_score": {"script": "_score"}, "filter": {}} if isinstance(name_or_sf, collections_abc.Mapping): if params: raise ValueError("SF() cannot accept parameters when passing in a dict.") kwargs = {} - sf = name_or_sf.copy() + sf = name_or_sf.copy() # type: ignore for k in ScoreFunction._param_defs: if k in name_or_sf: kwargs[k] = sf.pop(k) @@ -74,16 +72,16 @@ def SF(name_or_sf, **params): class ScoreFunction(DslBase): - _type_name = "score_function" + _type_name: str = "score_function" _type_shortcut = staticmethod(SF) _param_defs = { "query": {"type": "query"}, "filter": {"type": "query"}, "weight": {}, } - name = None + name: Optional[str] = None - def to_dict(self): + def to_dict(self) -> Any: d = super(ScoreFunction, self).to_dict() # filter and query dicts should be at the same level as us for k in self._param_defs: @@ -99,7 +97,7 @@ class ScriptScore(ScoreFunction): class BoostFactor(ScoreFunction): name = "boost_factor" - def to_dict(self): + def to_dict(self) -> Any: d = super(BoostFactor, self).to_dict() if "value" in d[self.name]: d[self.name] = d[self.name].pop("value") diff --git a/opensearchpy/helpers/function.pyi b/opensearchpy/helpers/function.pyi deleted file mode 100644 index 72b4b342..00000000 --- a/opensearchpy/helpers/function.pyi +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class ScoreFunction(DslBase): ... -class ScriptScore(ScoreFunction): ... -class BoostFactor(ScoreFunction): ... -class RandomScore(ScoreFunction): ... -class FieldValueFactor(ScoreFunction): ... -class Linear(ScoreFunction): ... -class Gauss(ScoreFunction): ... -class Exp(ScoreFunction): ... - -def SF(name_or_sf: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index 6bbc23e8..e96136b2 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -25,6 +25,9 @@ # specific language governing permissions and limitations # under the License. 
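As a reference point for the `SF()` shortcut annotated in the `function.py` hunk above: both the by-name form and the single-key-dict form resolve to `ScoreFunction` subclasses (the deleted stub listed `FieldValueFactor`, `Gauss`, and friends). A sketch, with illustrative field names:

```python
# SF() resolves score functions by name or from a one-key dict
# (the field names here are assumptions).
from opensearchpy.helpers.function import SF
from opensearchpy.helpers.query import Q

boost = SF("field_value_factor", field="popularity")
decay = SF({"gauss": {"pub_date": {"origin": "now", "scale": "10d"}}})

q = Q("function_score", query=Q("match", title="python"), functions=[boost, decay])
print(q.to_dict())
```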
+from typing import Any, Optional + +from opensearchpy.client import OpenSearch from opensearchpy.connection.connections import get_connection from opensearchpy.helpers import analysis @@ -36,7 +39,14 @@ class IndexTemplate(object): - def __init__(self, name, template, index=None, order=None, **kwargs): + def __init__( + self, + name: Any, + template: Any, + index: Any = None, + order: Any = None, + **kwargs: Any + ) -> None: if index is None: self._index = Index(template, **kwargs) else: @@ -50,17 +60,17 @@ def __init__(self, name, template, index=None, order=None, **kwargs): self._template_name = name self.order = order - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._index, attr_name) - def to_dict(self): + def to_dict(self) -> Any: d = self._index.to_dict() d["index_patterns"] = [self._index._name] if self.order is not None: d["order"] = self.order return d - def save(self, using=None): + def save(self, using: Any = None) -> Any: opensearch = get_connection(using or self._index._using) return opensearch.indices.put_template( name=self._template_name, body=self.to_dict() @@ -68,25 +78,27 @@ def save(self, using=None): class Index(object): - def __init__(self, name, using="default"): + def __init__(self, name: Any, using: str = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` """ self._name = name - self._doc_types = [] + self._doc_types: Any = [] self._using = using - self._settings = {} - self._aliases = {} - self._analysis = {} - self._mapping = None + self._settings: Any = {} + self._aliases: Any = {} + self._analysis: Any = {} + self._mapping: Any = None - def get_or_create_mapping(self): + def get_or_create_mapping(self) -> Any: if self._mapping is None: self._mapping = Mapping() return self._mapping - def as_template(self, template_name, pattern=None, order=None): + def as_template( + self, template_name: Any, pattern: Any = None, order: Any = None + ) -> Any: # TODO: should we allow pattern to be a top-level arg? # or maybe have an IndexPattern that allows for it and have # Document._index be that? @@ -94,7 +106,7 @@ def as_template(self, template_name, pattern=None, order=None): template_name, pattern or self._name, index=self, order=order ) - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: for doc in self._doc_types: nested, field = doc._doc_type.mapping.resolve_nested(field_path) if field is not None: @@ -103,7 +115,7 @@ def resolve_nested(self, field_path): return self._mapping.resolve_nested(field_path) return (), None - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: for doc in self._doc_types: field = doc._doc_type.mapping.resolve_field(field_path) if field is not None: @@ -112,12 +124,12 @@ def resolve_field(self, field_path): return self._mapping.resolve_field(field_path) return None - def load_mappings(self, using=None): + def load_mappings(self, using: Optional[OpenSearch] = None) -> None: self.get_or_create_mapping().update_from_opensearch( self._name, using=using or self._using ) - def clone(self, name=None, using=None): + def clone(self, name: Any = None, using: Any = None) -> Any: """ Create a copy of the instance with another name or connection alias. 
Useful for creating multiple indices with shared configuration:: @@ -141,14 +153,14 @@ def clone(self, name=None, using=None): i._mapping = self._mapping._clone() return i - def _get_connection(self, using=None): + def _get_connection(self, using: Any = None) -> Any: if self._name is None: raise ValueError("You cannot perform API calls on the default index.") return get_connection(using or self._using) connection = property(_get_connection) - def mapping(self, mapping): + def mapping(self, mapping: Any) -> Any: """ Associate a mapping (an instance of :class:`~opensearchpy.Mapping`) with this index. @@ -157,7 +169,7 @@ def mapping(self, mapping): """ self.get_or_create_mapping().update(mapping) - def document(self, document): + def document(self, document: Any) -> Any: """ Associate a :class:`~opensearchpy.Document` subclass with an index. This means that, when this index is created, it will contain the @@ -188,7 +200,7 @@ class Post(Document): return document - def settings(self, **kwargs): + def settings(self, **kwargs: Any) -> Any: """ Add settings to the index:: @@ -201,7 +213,7 @@ def settings(self, **kwargs): self._settings.update(kwargs) return self - def aliases(self, **kwargs): + def aliases(self, **kwargs: Any) -> Any: """ Add aliases to the index definition:: @@ -211,7 +223,7 @@ def aliases(self, **kwargs): self._aliases.update(kwargs) return self - def analyzer(self, *args, **kwargs): + def analyzer(self, *args: Any, **kwargs: Any) -> Any: """ Explicitly add an analyzer to an index. Note that all custom analyzers defined in mappings will also be created. This is useful for search analyzers. @@ -238,14 +250,14 @@ def analyzer(self, *args, **kwargs): # merge the definition merge(self._analysis, d, True) - def to_dict(self): + def to_dict(self) -> Any: out = {} if self._settings: out["settings"] = self._settings if self._aliases: out["aliases"] = self._aliases - mappings = self._mapping.to_dict() if self._mapping else {} - analysis = self._mapping._collect_analysis() if self._mapping else {} + mappings: Any = self._mapping.to_dict() if self._mapping else {} + analysis: Any = self._mapping._collect_analysis() if self._mapping else {} for d in self._doc_types: mapping = d._doc_type.mapping merge(mappings, mapping.to_dict(), True) @@ -257,7 +269,7 @@ def to_dict(self): out.setdefault("settings", {})["analysis"] = analysis return out - def search(self, using=None): + def search(self, using: Optional[OpenSearch] = None) -> Search: """ Return a :class:`~opensearchpy.Search` object searching over the index (or all the indices belonging to this template) and its @@ -267,7 +279,7 @@ def search(self, using=None): using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using=None): + def updateByQuery(self, using: Optional[OpenSearch] = None) -> UpdateByQuery: """ Return a :class:`~opensearchpy.UpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match @@ -281,7 +293,7 @@ def updateByQuery(self, using=None): index=self._name, ) - def create(self, using=None, **kwargs): + def create(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Creates the index in opensearch. 
@@ -292,13 +304,13 @@ def create(self, using=None, **kwargs): index=self._name, body=self.to_dict(), **kwargs ) - def is_closed(self, using=None): + def is_closed(self, using: Optional[OpenSearch] = None) -> Any: state = self._get_connection(using).cluster.state( index=self._name, metric="metadata" ) return state["metadata"]["indices"][self._name]["state"] == "close" - def save(self, using=None): + def save(self, using: Optional[OpenSearch] = None) -> Any: """ Sync the index definition with opensearch, creating the index if it doesn't exist and updating its settings and mappings if it does. @@ -352,7 +364,7 @@ def save(self, using=None): if mappings: self.put_mapping(using=using, body=mappings) - def analyze(self, using=None, **kwargs): + def analyze(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Perform the analysis process on a text and return the tokens breakdown of the text. @@ -362,7 +374,7 @@ def analyze(self, using=None, **kwargs): """ return self._get_connection(using).indices.analyze(index=self._name, **kwargs) - def refresh(self, using=None, **kwargs): + def refresh(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Performs a refresh operation on the index. @@ -371,7 +383,7 @@ def refresh(self, using=None, **kwargs): """ return self._get_connection(using).indices.refresh(index=self._name, **kwargs) - def flush(self, using=None, **kwargs): + def flush(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Performs a flush operation on the index. @@ -380,7 +392,7 @@ def flush(self, using=None, **kwargs): """ return self._get_connection(using).indices.flush(index=self._name, **kwargs) - def get(self, using=None, **kwargs): + def get(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The get index API allows to retrieve information about the index. @@ -389,7 +401,7 @@ def get(self, using=None, **kwargs): """ return self._get_connection(using).indices.get(index=self._name, **kwargs) - def open(self, using=None, **kwargs): + def open(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Opens the index in opensearch. @@ -398,7 +410,7 @@ def open(self, using=None, **kwargs): """ return self._get_connection(using).indices.open(index=self._name, **kwargs) - def close(self, using=None, **kwargs): + def close(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Closes the index in opensearch. @@ -407,7 +419,7 @@ def close(self, using=None, **kwargs): """ return self._get_connection(using).indices.close(index=self._name, **kwargs) - def delete(self, using=None, **kwargs): + def delete(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Deletes the index in opensearch. @@ -416,7 +428,7 @@ def delete(self, using=None, **kwargs): """ return self._get_connection(using).indices.delete(index=self._name, **kwargs) - def exists(self, using=None, **kwargs): + def exists(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Returns ``True`` if the index already exists in opensearch. @@ -425,7 +437,7 @@ def exists(self, using=None, **kwargs): """ return self._get_connection(using).indices.exists(index=self._name, **kwargs) - def put_mapping(self, using=None, **kwargs): + def put_mapping(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Register specific mapping definition for a specific type. 
@@ -436,7 +448,7 @@ def put_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_mapping(self, using=None, **kwargs): + def get_mapping(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve specific mapping definition for a specific type. @@ -447,7 +459,9 @@ def get_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_field_mapping(self, using=None, **kwargs): + def get_field_mapping( + self, using: Optional[OpenSearch] = None, **kwargs: Any + ) -> Any: """ Retrieve mapping definition of a specific field. @@ -458,7 +472,7 @@ def get_field_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def put_alias(self, using=None, **kwargs): + def put_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Create an alias for the index. @@ -467,7 +481,7 @@ def put_alias(self, using=None, **kwargs): """ return self._get_connection(using).indices.put_alias(index=self._name, **kwargs) - def exists_alias(self, using=None, **kwargs): + def exists_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Return a boolean indicating whether given alias exists for this index. @@ -478,7 +492,7 @@ def exists_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_alias(self, using=None, **kwargs): + def get_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve a specified alias. @@ -487,7 +501,7 @@ def get_alias(self, using=None, **kwargs): """ return self._get_connection(using).indices.get_alias(index=self._name, **kwargs) - def delete_alias(self, using=None, **kwargs): + def delete_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Delete specific alias. @@ -498,7 +512,7 @@ def delete_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_settings(self, using=None, **kwargs): + def get_settings(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve settings for the index. @@ -509,7 +523,7 @@ def get_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - def put_settings(self, using=None, **kwargs): + def put_settings(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Change specific index level settings in real time. @@ -520,7 +534,7 @@ def put_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - def stats(self, using=None, **kwargs): + def stats(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve statistics on different operations happening on the index. @@ -529,7 +543,7 @@ def stats(self, using=None, **kwargs): """ return self._get_connection(using).indices.stats(index=self._name, **kwargs) - def segments(self, using=None, **kwargs): + def segments(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Provide low level segments information that a Lucene index (shard level) is built with. @@ -539,7 +553,7 @@ def segments(self, using=None, **kwargs): """ return self._get_connection(using).indices.segments(index=self._name, **kwargs) - def validate_query(self, using=None, **kwargs): + def validate_query(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Validate a potentially expensive query without executing it. 
@@ -550,7 +564,7 @@ def validate_query(self, using=None, **kwargs): index=self._name, **kwargs ) - def clear_cache(self, using=None, **kwargs): + def clear_cache(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Clear all caches or specific cached associated with the index. @@ -561,7 +575,7 @@ def clear_cache(self, using=None, **kwargs): index=self._name, **kwargs ) - def recovery(self, using=None, **kwargs): + def recovery(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The indices recovery API provides insight into on-going shard recoveries for the index. @@ -571,7 +585,7 @@ def recovery(self, using=None, **kwargs): """ return self._get_connection(using).indices.recovery(index=self._name, **kwargs) - def upgrade(self, using=None, **kwargs): + def upgrade(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Upgrade the index to the latest format. @@ -580,7 +594,7 @@ def upgrade(self, using=None, **kwargs): """ return self._get_connection(using).indices.upgrade(index=self._name, **kwargs) - def get_upgrade(self, using=None, **kwargs): + def get_upgrade(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Monitor how much of the index is upgraded. @@ -591,7 +605,7 @@ def get_upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - def shard_stores(self, using=None, **kwargs): + def shard_stores(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Provides store information for shard copies of the index. Store information reports on which nodes shard copies exist, the shard copy @@ -605,7 +619,7 @@ def shard_stores(self, using=None, **kwargs): index=self._name, **kwargs ) - def forcemerge(self, using=None, **kwargs): + def forcemerge(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The force merge API allows to force merging of the index through an API. The merge relates to the number of segments a Lucene index holds @@ -623,7 +637,7 @@ def forcemerge(self, using=None, **kwargs): index=self._name, **kwargs ) - def shrink(self, using=None, **kwargs): + def shrink(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The shrink index API allows you to shrink an existing index into a new index with fewer primary shards. The number of primary shards in the diff --git a/opensearchpy/helpers/index.pyi b/opensearchpy/helpers/index.pyi deleted file mode 100644 index e2f95797..00000000 --- a/opensearchpy/helpers/index.pyi +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
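The `Index` methods annotated across these hunks are thin wrappers over `client.indices.*` using the stored name and connection alias; the `settings()`/`aliases()` docstrings above already show the calling convention. A minimal end-to-end sketch (cluster details and names are assumptions):

```python
# Index helper round trip (connection details and names are assumptions).
from opensearchpy.connection.connections import create_connection
from opensearchpy.helpers.index import Index

create_connection(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

i = Index("blogs")
i.settings(number_of_shards=1, number_of_replicas=0)
i.aliases(old_blogs={})

if not i.exists():
    i.create()          # request body comes from i.to_dict()
print(i.get_settings())
i.delete()
```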
See the License for the -# specific language governing permissions and limitations -# under the License. - -class IndexTemplate(object): ... -class Index(object): ... diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py index 8fd37348..eaa13e3f 100644 --- a/opensearchpy/helpers/mapping.py +++ b/opensearchpy/helpers/mapping.py @@ -25,12 +25,9 @@ # specific language governing permissions and limitations # under the License. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from itertools import chain +from typing import Any from six import iteritems, itervalues @@ -56,26 +53,26 @@ class Properties(DslBase): name = "properties" _param_defs = {"properties": {"type": "field", "hash": True}} - def __init__(self): + def __init__(self) -> None: super(Properties, self).__init__() - def __repr__(self): + def __repr__(self) -> str: return "Properties()" - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties[name] - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self.properties - def to_dict(self): + def to_dict(self) -> Any: return super(Properties, self).to_dict()["properties"] - def field(self, name, *args, **kwargs): + def field(self, name: Any, *args: Any, **kwargs: Any) -> "Properties": self.properties[name] = construct_field(*args, **kwargs) return self - def _collect_fields(self): + def _collect_fields(self) -> Any: """Iterate over all Field objects within, including multi fields.""" for f in itervalues(self.properties.to_dict()): yield f @@ -88,7 +85,7 @@ def _collect_fields(self): for inner_f in f._collect_fields(): yield inner_f - def update(self, other_object): + def update(self, other_object: Any) -> None: if not hasattr(other_object, "properties"): # not an inner/nested object, no merge possible return @@ -103,25 +100,25 @@ def update(self, other_object): class Mapping(object): - def __init__(self): + def __init__(self) -> None: self.properties = Properties() - self._meta = {} + self._meta: Any = {} - def __repr__(self): + def __repr__(self) -> str: return "Mapping()" - def _clone(self): + def _clone(self) -> Any: m = Mapping() m.properties._params = self.properties._params.copy() return m @classmethod - def from_opensearch(cls, index, using="default"): + def from_opensearch(cls, index: Any, using: str = "default") -> Any: m = cls() m.update_from_opensearch(index, using) return m - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: field = self nested = [] parts = field_path.split(".") @@ -134,18 +131,18 @@ def resolve_nested(self, field_path): nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: field = self for step in field_path.split("."): try: field = field[step] except KeyError: - return + return None return field - def _collect_analysis(self): - analysis = {} - fields = [] + def _collect_analysis(self) -> Any: + analysis: Any = {} + fields: Any = [] if "_all" in self._meta: fields.append(Text(**self._meta["_all"])) @@ -171,20 +168,20 @@ def _collect_analysis(self): return analysis - def save(self, index, using="default"): + def save(self, index: Any, using: str = "default") -> Any: from opensearchpy.helpers.index import Index index = Index(index, using=using) index.mapping(self) return index.save() - def 
update_from_opensearch(self, index, using="default"): + def update_from_opensearch(self, index: Any, using: str = "default") -> None: opensearch = get_connection(using) raw = opensearch.indices.get_mapping(index=index) _, raw = raw.popitem() self._update_from_dict(raw["mappings"]) - def _update_from_dict(self, raw): + def _update_from_dict(self, raw: Any) -> None: for name, definition in iteritems(raw.get("properties", {})): self.field(name, definition) @@ -196,7 +193,7 @@ def _update_from_dict(self, raw): else: self.meta(name, value) - def update(self, mapping, update_only=False): + def update(self, mapping: Any, update_only: bool = False) -> None: for name in mapping: if update_only and name in self: # nested and inner objects, merge recursively @@ -213,20 +210,20 @@ def update(self, mapping, update_only=False): else: self._meta.update(mapping._meta) - def __contains__(self, name): + def __contains__(self, name: Any) -> Any: return name in self.properties.properties - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties.properties[name] - def __iter__(self): + def __iter__(self) -> Any: return iter(self.properties.properties) - def field(self, *args, **kwargs): + def field(self, *args: Any, **kwargs: Any) -> "Mapping": self.properties.field(*args, **kwargs) return self - def meta(self, name, params=None, **kwargs): + def meta(self, name: Any, params: Any = None, **kwargs: Any) -> "Mapping": if not name.startswith("_") and name not in META_FIELDS: name = "_" + name @@ -236,7 +233,7 @@ def meta(self, name, params=None, **kwargs): self._meta[name] = kwargs if params is None else params return self - def to_dict(self): + def to_dict(self) -> Any: meta = self._meta # hard coded serialization of analyzers in _all diff --git a/opensearchpy/helpers/mapping.pyi b/opensearchpy/helpers/mapping.pyi deleted file mode 100644 index 99a82935..00000000 --- a/opensearchpy/helpers/mapping.pyi +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import DslBase - -class Properties(DslBase): ... -class Mapping(object): ... diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index 784435d6..dc2db8a7 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -25,12 +25,9 @@ # specific language governing permissions and limitations # under the License. 
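The `Mapping`/`Properties` pair annotated above can be exercised without a cluster, since `field()`, `meta()` and `to_dict()` are pure builders; a short sketch with illustrative field names:

```python
# Build a mapping by hand with the helpers from this hunk.
from opensearchpy.helpers.field import Keyword, Text
from opensearchpy.helpers.mapping import Mapping

m = Mapping()
m.field("title", "text", fields={"raw": Keyword()})  # multi-field
m.field("body", Text(analyzer="snowball"))
m.meta("dynamic", "strict")

print(m.to_dict())   # roughly {'dynamic': 'strict', 'properties': {...}}
```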
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from itertools import chain +from typing import Any, Optional # 'SF' looks unused but the test suite assumes it's available # from this module so others are liable to do so as well. @@ -39,7 +36,7 @@ from .utils import DslBase -def Q(name_or_query="match_all", **params): +def Q(name_or_query: str = "match_all", **params: Any) -> Any: # {"match": {"title": "python"}} if isinstance(name_or_query, collections_abc.Mapping): if params: @@ -49,7 +46,7 @@ def Q(name_or_query="match_all", **params): 'Q() can only accept dict with a single query ({"match": {...}}). ' "Instead it got (%r)" % name_or_query ) - name, params = name_or_query.copy().popitem() + name, params = name_or_query.copy().popitem() # type: ignore return Query.get_dsl_class(name)(_expand__to_dot=False, **params) # MatchAll() @@ -69,28 +66,28 @@ def Q(name_or_query="match_all", **params): class Query(DslBase): - _type_name = "query" + _type_name: str = "query" _type_shortcut = staticmethod(Q) - name = None + name: Optional[str] = None - def __add__(self, other): + def __add__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__radd__"): return other.__radd__(self) return Bool(must=[self, other]) - def __invert__(self): + def __invert__(self) -> Any: return Bool(must_not=[self]) - def __or__(self, other): + def __or__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__ror__"): return other.__ror__(self) return Bool(should=[self, other]) - def __and__(self, other): + def __and__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__rand__"): @@ -101,17 +98,17 @@ def __and__(self, other): class MatchAll(Query): name = "match_all" - def __add__(self, other): + def __add__(self, other: Any) -> Any: return other._clone() __and__ = __rand__ = __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: Any) -> "MatchAll": return self __ror__ = __or__ - def __invert__(self): + def __invert__(self) -> Any: return MatchNone() @@ -121,17 +118,17 @@ def __invert__(self): class MatchNone(Query): name = "match_none" - def __add__(self, other): + def __add__(self, other: Any) -> "MatchNone": return self __and__ = __rand__ = __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: Any) -> Any: return other._clone() __ror__ = __or__ - def __invert__(self): + def __invert__(self) -> Any: return MatchAll() @@ -144,7 +141,7 @@ class Bool(Query): "filter": {"type": "query", "multi": True}, } - def __add__(self, other): + def __add__(self, other: "Bool") -> Any: q = self._clone() if isinstance(other, Bool): q.must += other.must @@ -157,7 +154,7 @@ def __add__(self, other): __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: "Bool") -> Any: for q in (self, other): if isinstance(q, Bool) and not any( (q.must, q.must_not, q.filter, getattr(q, "minimum_should_match", None)) @@ -182,14 +179,14 @@ def __or__(self, other): __ror__ = __or__ @property - def _min_should_match(self): + def _min_should_match(self) -> Any: return getattr( self, "minimum_should_match", 0 if not self.should or (self.must or self.filter) else 1, ) - def __invert__(self): + def __invert__(self) -> Any: # Because an empty Bool query is treated like # 
MatchAll the inverse should be MatchNone if not any(chain(self.must, self.filter, self.should, self.must_not)): @@ -209,7 +206,7 @@ def __invert__(self): return negations[0] return Bool(should=negations) - def __and__(self, other): + def __and__(self, other: "Bool") -> Any: q = self._clone() if isinstance(other, Bool): q.must += other.must @@ -256,7 +253,7 @@ class FunctionScore(Query): "functions": {"type": "score_function", "multi": True}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: if "functions" in kwargs: pass else: diff --git a/opensearchpy/helpers/query.pyi b/opensearchpy/helpers/query.pyi deleted file mode 100644 index 673e83f9..00000000 --- a/opensearchpy/helpers/query.pyi +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class Query(DslBase): ... -class MatchAll(Query): ... -class MatchNone(Query): ... -class Bool(Query): ... -class FunctionScore(Query): ... -class Boosting(Query): ... -class ConstantScore(Query): ... -class DisMax(Query): ... -class Filtered(Query): ... -class Indices(Query): ... -class Percolate(Query): ... -class Nested(Query): ... -class HasChild(Query): ... -class HasParent(Query): ... -class TopChildren(Query): ... -class SpanFirst(Query): ... -class SpanMulti(Query): ... -class SpanNear(Query): ... -class SpanNot(Query): ... -class SpanOr(Query): ... -class FieldMaskingSpan(Query): ... -class SpanContaining(Query): ... -class SpanWithin(Query): ... -class Common(Query): ... -class Fuzzy(Query): ... -class FuzzyLikeThis(Query): ... -class FuzzyLikeThisField(Query): ... -class RankFeature(Query): ... -class DistanceFeature(Query): ... -class GeoBoundingBox(Query): ... -class GeoDistance(Query): ... -class GeoDistanceRange(Query): ... -class GeoPolygon(Query): ... -class GeoShape(Query): ... -class GeohashCell(Query): ... -class Ids(Query): ... -class Intervals(Query): ... -class Limit(Query): ... -class Match(Query): ... -class MatchPhrase(Query): ... -class MatchPhrasePrefix(Query): ... -class MatchBoolPrefix(Query): ... -class Exists(Query): ... -class MoreLikeThis(Query): ... -class MoreLikeThisField(Query): ... -class MultiMatch(Query): ... -class Prefix(Query): ... -class QueryString(Query): ... -class Range(Query): ... -class Regexp(Query): ... -class Shape(Query): ... -class SimpleQueryString(Query): ... -class SpanTerm(Query): ... -class Template(Query): ... -class Term(Query): ... 
-class Terms(Query): ... -class TermsSet(Query): ... -class Wildcard(Query): ... -class Script(Query): ... -class ScriptScore(Query): ... -class Type(Query): ... -class ParentId(Query): ... -class Wrapper(Query): ... - -def Q(name_or_query: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/response/__init__.py b/opensearchpy/helpers/response/__init__.py index d4792b11..c6215a6b 100644 --- a/opensearchpy/helpers/response/__init__.py +++ b/opensearchpy/helpers/response/__init__.py @@ -25,51 +25,51 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from ..utils import AttrDict, AttrList, _wrap from .hit import Hit, HitMeta -__all__ = ["Response", "AggResponse", "UpdateByQueryResponse", "Hit", "HitMeta"] - class Response(AttrDict): - def __init__(self, search, response, doc_class=None): + def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None: super(AttrDict, self).__setattr__("_search", search) super(AttrDict, self).__setattr__("_doc_class", doc_class) super(Response, self).__init__(response) - def __iter__(self): + def __iter__(self) -> Any: return iter(self.hits) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: if isinstance(key, (slice, int)): # for slicing etc return self.hits[key] return super(Response, self).__getitem__(key) - def __nonzero__(self): + def __nonzero__(self) -> Any: return bool(self.hits) __bool__ = __nonzero__ - def __repr__(self): + def __repr__(self) -> str: return "" % (self.hits or self.aggregations) - def __len__(self): + def __len__(self) -> int: return len(self.hits) - def __getstate__(self): + def __getstate__(self) -> Any: return self._d_, self._search, self._doc_class - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("_d_", state[0]) super(AttrDict, self).__setattr__("_search", state[1]) super(AttrDict, self).__setattr__("_doc_class", state[2]) - def success(self): + def success(self) -> bool: return self._shards.total == self._shards.successful and not self.timed_out @property - def hits(self): + def hits(self) -> Any: if not hasattr(self, "_hits"): h = self._d_["hits"] @@ -86,11 +86,11 @@ def hits(self): return self._hits @property - def aggregations(self): + def aggregations(self) -> Any: return self.aggs @property - def aggs(self): + def aggs(self) -> Any: if not hasattr(self, "_aggs"): aggs = AggResponse( self._search.aggs, self._search, self._d_.get("aggregations", {}) @@ -102,27 +102,30 @@ def aggs(self): class AggResponse(AttrDict): - def __init__(self, aggs, search, data): + def __init__(self, aggs: Any, search: Any, data: Any) -> None: super(AttrDict, self).__setattr__("_meta", {"search": search, "aggs": aggs}) super(AggResponse, self).__init__(data) - def __getitem__(self, attr_name): + def __getitem__(self, attr_name: Any) -> Any: if attr_name in self._meta["aggs"]: # don't do self._meta['aggs'][attr_name] to avoid copying agg = self._meta["aggs"].aggs[attr_name] return agg.result(self._meta["search"], self._d_[attr_name]) return super(AggResponse, self).__getitem__(attr_name) - def __iter__(self): + def __iter__(self) -> Any: for name in self._meta["aggs"]: yield self[name] class UpdateByQueryResponse(AttrDict): - def __init__(self, search, response, doc_class=None): + def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None: super(AttrDict, self).__setattr__("_search", search) super(AttrDict, self).__setattr__("_doc_class", doc_class) 
super(UpdateByQueryResponse, self).__init__(response) - def success(self): + def success(self) -> bool: return not self.timed_out and not self.failures + + +__all__ = ["Response", "AggResponse", "UpdateByQueryResponse", "Hit", "HitMeta"] diff --git a/opensearchpy/helpers/response/__init__.pyi b/opensearchpy/helpers/response/__init__.pyi deleted file mode 100644 index f592e46a..00000000 --- a/opensearchpy/helpers/response/__init__.pyi +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from ..utils import AttrDict - -class Response(AttrDict): ... -class AggResponse(AttrDict): ... -class UpdateByQueryResponse(AttrDict): ... diff --git a/opensearchpy/helpers/response/aggs.py b/opensearchpy/helpers/response/aggs.py index c8e7d5dd..42015d2d 100644 --- a/opensearchpy/helpers/response/aggs.py +++ b/opensearchpy/helpers/response/aggs.py @@ -25,17 +25,19 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from ..utils import AttrDict, AttrList from . 
import AggResponse, Response class Bucket(AggResponse): - def __init__(self, aggs, search, data, field=None): + def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: super(Bucket, self).__init__(aggs, search, data) class FieldBucket(Bucket): - def __init__(self, aggs, search, data, field=None): + def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: if field: data["key"] = field.deserialize(data["key"]) super(FieldBucket, self).__init__(aggs, search, data, field) @@ -44,7 +46,7 @@ def __init__(self, aggs, search, data, field=None): class BucketData(AggResponse): _bucket_class = Bucket - def _wrap_bucket(self, data): + def _wrap_bucket(self, data: Any) -> Any: return self._bucket_class( self._meta["aggs"], self._meta["search"], @@ -52,19 +54,19 @@ def _wrap_bucket(self, data): field=self._meta.get("field"), ) - def __iter__(self): + def __iter__(self) -> Any: return iter(self.buckets) - def __len__(self): + def __len__(self) -> int: return len(self.buckets) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: if isinstance(key, (int, slice)): return self.buckets[key] return super(BucketData, self).__getitem__(key) @property - def buckets(self): + def buckets(self) -> Any: if not hasattr(self, "_buckets"): field = getattr(self._meta["aggs"], "field", None) if field: @@ -83,8 +85,11 @@ class FieldBucketData(BucketData): class TopHitsData(Response): - def __init__(self, agg, search, data): + def __init__(self, agg: Any, search: Any, data: Any) -> None: super(AttrDict, self).__setattr__( "meta", AttrDict({"agg": agg, "search": search}) ) super(TopHitsData, self).__init__(search, data) + + +__all__ = ["AggResponse"] diff --git a/opensearchpy/helpers/response/aggs.pyi b/opensearchpy/helpers/response/aggs.pyi deleted file mode 100644 index d943dbdd..00000000 --- a/opensearchpy/helpers/response/aggs.pyi +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from . import AggResponse as AggResponse -from . import Response as Response - -class Bucket(AggResponse): ... -class FieldBucket(Bucket): ... -class BucketData(AggResponse): ... -class FieldBucketData(BucketData): ... -class TopHitsData(Response): ... 
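Tying the response wrappers together: `Response.__iter__` walks `hits`, and the `aggregations` property hands each named agg to the matching `Bucket*` class from `aggs.py`. A sketch assuming a local cluster and a `movies` index with a `year` field:

```python
# Iterate hits and aggregation buckets via the wrappers in these hunks
# (client, index and field names are assumptions).
from opensearchpy import OpenSearch
from opensearchpy.helpers.search import Search

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

s = Search(using=client, index="movies").query("match", title="python")
s.aggs.bucket("per_year", "terms", field="year")

response = s.execute()
print(response.success())
for hit in response:                       # Response.__iter__ -> response.hits
    print(hit.meta.score, hit.title)
for bucket in response.aggregations.per_year.buckets:
    print(bucket.key, bucket.doc_count)
```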
diff --git a/opensearchpy/helpers/response/hit.py b/opensearchpy/helpers/response/hit.py index 8f6230aa..c6e8a4a9 100644 --- a/opensearchpy/helpers/response/hit.py +++ b/opensearchpy/helpers/response/hit.py @@ -25,11 +25,13 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from ..utils import AttrDict, HitMeta class Hit(AttrDict): - def __init__(self, document): + def __init__(self, document: Any) -> None: data = {} if "_source" in document: data = document["_source"] @@ -40,22 +42,25 @@ def __init__(self, document): # assign meta as attribute and not as key in self._d_ super(AttrDict, self).__setattr__("meta", HitMeta(document)) - def __getstate__(self): + def __getstate__(self) -> Any: # add self.meta since it is not in self.__dict__ return super(Hit, self).__getstate__() + (self.meta,) - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("meta", state[-1]) super(Hit, self).__setstate__(state[:-1]) - def __dir__(self): + def __dir__(self) -> Any: # be sure to expose meta in dir(self) return super(Hit, self).__dir__() + ["meta"] - def __repr__(self): + def __repr__(self) -> str: return "".format( "/".join( getattr(self.meta, key) for key in ("index", "id") if key in self.meta ), super(Hit, self).__repr__(), ) + + +__all__ = ["Hit", "HitMeta"] diff --git a/opensearchpy/helpers/response/hit.pyi b/opensearchpy/helpers/response/hit.pyi deleted file mode 100644 index 7597832d..00000000 --- a/opensearchpy/helpers/response/hit.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from ..utils import AttrDict - -class Hit(AttrDict): ... diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py index 30c59a92..46ba9da9 100644 --- a/opensearchpy/helpers/search.py +++ b/opensearchpy/helpers/search.py @@ -25,12 +25,9 @@ # specific language governing permissions and limitations # under the License. 
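`Hit` keeps the document envelope out of the attribute namespace: `_source` becomes the dict body, while `meta` (a `HitMeta`) exposes the underscored keys with the prefix stripped. It can be constructed from a raw hit dict directly:

```python
# Construct a Hit from a raw search hit (shape as returned by the server).
from opensearchpy.helpers.response.hit import Hit

raw = {"_index": "movies", "_id": "42", "_score": 1.3,
       "_source": {"title": "Monty Python"}}

hit = Hit(raw)
print(hit.title)                                    # attribute access into _source
print(hit.meta.index, hit.meta.id, hit.meta.score)  # envelope, underscores stripped
```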
+import collections.abc as collections_abc import copy - -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from typing import Any from six import iteritems, string_types @@ -39,8 +36,8 @@ from opensearchpy.helpers import scan from ..exceptions import IllegalOperation -from ..helpers.aggs import A, AggBase from ..helpers.query import Bool, Q +from .aggs import A, AggBase from .response import Hit, Response from .utils import AttrDict, DslBase, recursive_to_dict @@ -52,17 +49,17 @@ class QueryProxy(object): the wrapped query. """ - def __init__(self, search, attr_name): + def __init__(self, search: Any, attr_name: Any) -> None: self._search = search - self._proxied = None + self._proxied: Any = None self._attr_name = attr_name - def __nonzero__(self): + def __nonzero__(self) -> bool: return self._proxied is not None __bool__ = __nonzero__ - def __call__(self, *args, **kwargs): + def __call__(self, *args: Any, **kwargs: Any) -> Any: s = self._search._clone() # we cannot use self._proxied since we just cloned self._search and @@ -76,19 +73,19 @@ def __call__(self, *args, **kwargs): # always return search to be chainable return s - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._proxied, attr_name) - def __setattr__(self, attr_name, value): + def __setattr__(self, attr_name: Any, value: Any) -> None: if not attr_name.startswith("_"): self._proxied = Q(self._proxied.to_dict()) setattr(self._proxied, attr_name, value) super(QueryProxy, self).__setattr__(attr_name, value) - def __getstate__(self): + def __getstate__(self) -> Any: return self._search, self._proxied, self._attr_name - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: self._search, self._proxied, self._attr_name = state @@ -101,13 +98,13 @@ class ProxyDescriptor(object): """ - def __init__(self, name): + def __init__(self, name: str) -> None: self._attr_name = "_%s_proxy" % name - def __get__(self, instance, owner): + def __get__(self, instance: Any, owner: Any) -> Any: return getattr(instance, self._attr_name) - def __set__(self, instance, value): + def __set__(self, instance: Any, value: Any) -> None: proxy = getattr(instance, self._attr_name) proxy._proxied = Q(value) @@ -115,17 +112,26 @@ def __set__(self, instance, value): class AggsProxy(AggBase, DslBase): name = "aggs" - def __init__(self, search): + def __init__(self, search: Any) -> None: self._base = self self._search = search self._params = {"aggs": {}} - def to_dict(self): + def to_dict(self) -> Any: return super(AggsProxy, self).to_dict().get("aggs", {}) class Request(object): - def __init__(self, using="default", index=None, doc_type=None, extra=None): + _doc_type: Any + _doc_type_map: Any + + def __init__( + self, + using: str = "default", + index: Any = None, + doc_type: Any = None, + extra: Any = None, + ) -> None: self._using = using self._index = None @@ -144,22 +150,22 @@ def __init__(self, using="default", index=None, doc_type=None, extra=None): elif doc_type: self._doc_type.append(doc_type) - self._params = {} - self._extra = extra or {} + self._params: Any = {} + self._extra: Any = extra or {} - def __eq__(self, other): + def __eq__(self: Any, other: Any) -> bool: return ( isinstance(other, Request) and other._params == self._params and other._index == self._index and other._doc_type == self._doc_type - and other.to_dict() == self.to_dict() + and other.to_dict() == self.to_dict() # type: 
ignore ) - def __copy__(self): + def __copy__(self) -> Any: return self._clone() - def params(self, **kwargs): + def params(self, **kwargs: Any) -> Any: """ Specify query params to be used when executing the search. All the keyword arguments will override the current values. @@ -173,7 +179,7 @@ def params(self, **kwargs): s._params.update(kwargs) return s - def index(self, *index): + def index(self, *index: Any) -> Any: """ Set the index for the search. If called empty it will remove all information. @@ -201,7 +207,7 @@ def index(self, *index): return s - def _resolve_field(self, path): + def _resolve_field(self, path: Any) -> Any: for dt in self._doc_type: if not hasattr(dt, "_index"): continue @@ -209,10 +215,10 @@ def _resolve_field(self, path): if field is not None: return field - def _resolve_nested(self, hit, parent_class=None): + def _resolve_nested(self, hit: Any, parent_class: Any = None) -> Any: doc_class = Hit - nested_path = [] + nested_path: Any = [] nesting = hit["_nested"] while nesting and "field" in nesting: nested_path.append(nesting["field"]) @@ -229,7 +235,7 @@ def _resolve_nested(self, hit, parent_class=None): return doc_class - def _get_result(self, hit, parent_class=None): + def _get_result(self, hit: Any, parent_class: Any = None) -> Any: doc_class = Hit dt = hit.get("_type") @@ -253,7 +259,7 @@ def _get_result(self, hit, parent_class=None): callback = getattr(doc_class, "from_opensearch", doc_class) return callback(hit) - def doc_type(self, *doc_type, **kwargs): + def doc_type(self, *doc_type: Any, **kwargs: Any) -> Any: """ Set the type to search through. You can supply a single value or multiple. Values can be strings or subclasses of ``Document``. @@ -279,7 +285,7 @@ def doc_type(self, *doc_type, **kwargs): s._doc_type_map.update(kwargs) return s - def using(self, client): + def using(self, client: Any) -> Any: """ Associate the search request with an opensearch client. A fresh copy will be returned with current instance remaining unchanged. @@ -292,7 +298,7 @@ def using(self, client): s._using = client return s - def extra(self, **kwargs): + def extra(self, **kwargs: Any) -> Any: """ Add extra keys to the request body. Mostly here for backwards compatibility. @@ -303,7 +309,7 @@ def extra(self, **kwargs): s._extra.update(kwargs) return s - def _clone(self): + def _clone(self) -> Any: s = self.__class__( using=self._using, index=self._index, doc_type=self._doc_type ) @@ -317,7 +323,7 @@ class Search(Request): query = ProxyDescriptor("query") post_filter = ProxyDescriptor("post_filter") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Search request to opensearch. 
@@ -331,31 +337,31 @@ def __init__(self, **kwargs): super(Search, self).__init__(**kwargs) self.aggs = AggsProxy(self) - self._sort = [] - self._collapse = {} - self._source = None - self._highlight = {} - self._highlight_opts = {} - self._suggest = {} - self._script_fields = {} + self._sort: Any = [] + self._collapse: Any = {} + self._source: Any = None + self._highlight: Any = {} + self._highlight_opts: Any = {} + self._suggest: Any = {} + self._script_fields: Any = {} self._response_class = Response self._query_proxy = QueryProxy(self, "query") self._post_filter_proxy = QueryProxy(self, "post_filter") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) - def __iter__(self): + def __iter__(self) -> Any: """ Iterate over the hits. """ return iter(self.execute()) - def __getitem__(self, n): + def __getitem__(self, n: Any) -> Any: """ Support slicing the `Search` instance for pagination. @@ -390,7 +396,7 @@ def __getitem__(self, n): return s @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `Search` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -411,7 +417,7 @@ def from_dict(cls, d): s.update_from_dict(d) return s - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -434,7 +440,7 @@ def _clone(self): s.aggs._params = {"aggs": self.aggs._params["aggs"].copy()} return s - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -442,7 +448,7 @@ def response_class(self, cls): s._response_class = cls return s - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "Search": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -477,7 +483,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script_fields(self, **kwargs): + def script_fields(self, **kwargs: Any) -> Any: """ Define script fields to be calculated on hits. @@ -503,7 +509,7 @@ def script_fields(self, **kwargs): s._script_fields.update(kwargs) return s - def source(self, fields=None, **kwargs): + def source(self, fields: Any = None, **kwargs: Any) -> Any: """ Selectively control how the _source field is returned. @@ -548,7 +554,7 @@ def source(self, fields=None, **kwargs): return s - def sort(self, *keys): + def sort(self, *keys: Any) -> Any: """ Add sorting information to the search request. If called without arguments it will remove all sort requirements. Otherwise it will @@ -581,7 +587,12 @@ def sort(self, *keys): s._sort.append(k) return s - def collapse(self, field=None, inner_hits=None, max_concurrent_group_searches=None): + def collapse( + self, + field: Any = None, + inner_hits: Any = None, + max_concurrent_group_searches: Any = None, + ) -> Any: """ Add collapsing information to the search request. 
@@ -604,7 +615,7 @@ def collapse(self, field=None, inner_hits=None, max_concurrent_group_searches=No s._collapse["max_concurrent_group_searches"] = max_concurrent_group_searches return s - def highlight_options(self, **kwargs): + def highlight_options(self, **kwargs: Any) -> Any: """ Update the global highlighting options used for this request. For example:: @@ -616,7 +627,7 @@ def highlight_options(self, **kwargs): s._highlight_opts.update(kwargs) return s - def highlight(self, *fields, **kwargs): + def highlight(self, *fields: Any, **kwargs: Any) -> Any: """ Request highlighting of some fields. All keyword arguments passed in will be used as parameters for all the fields in the ``fields`` parameter. Example:: @@ -656,7 +667,7 @@ def highlight(self, *fields, **kwargs): s._highlight[f] = kwargs return s - def suggest(self, name, text, **kwargs): + def suggest(self, name: Any, text: Any, **kwargs: Any) -> Any: """ Add a suggestions request to the search. @@ -673,7 +684,7 @@ def suggest(self, name, text, **kwargs): s._suggest[name].update(kwargs) return s - def to_dict(self, count=False, **kwargs): + def to_dict(self, count: bool = False, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request's body. @@ -720,7 +731,7 @@ def to_dict(self, count=False, **kwargs): d.update(recursive_to_dict(kwargs)) return d - def count(self): + def count(self) -> Any: """ Return the number of hits matching the query and filters. Note that only the actual number is returned. @@ -734,7 +745,7 @@ def count(self): # TODO: failed shards detection return opensearch.count(index=self._index, body=d, **self._params)["count"] - def execute(self, ignore_cache=False): + def execute(self, ignore_cache: bool = False) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. @@ -753,7 +764,7 @@ def execute(self, ignore_cache=False): ) return self._response - def scan(self): + def scan(self) -> Any: """ Turn the search into a scan search and return a generator that will iterate over all the documents matching the query. @@ -769,7 +780,7 @@ def scan(self): ): yield self._get_result(hit) - def delete(self): + def delete(self) -> Any: """ delete() executes the query by delegating to delete_by_query() """ @@ -789,22 +800,22 @@ class MultiSearch(Request): request. """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(MultiSearch, self).__init__(**kwargs) - self._searches = [] + self._searches: Any = [] - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return self._searches[key] - def __iter__(self): + def __iter__(self) -> Any: return iter(self._searches) - def _clone(self): + def _clone(self) -> Any: ms = super(MultiSearch, self)._clone() ms._searches = self._searches[:] return ms - def add(self, search): + def add(self, search: Any) -> Any: """ Adds a new :class:`~opensearchpy.Search` object to the request:: @@ -816,7 +827,7 @@ def add(self, search): ms._searches.append(search) return ms - def to_dict(self): + def to_dict(self) -> Any: out = [] for s in self._searches: meta = {} @@ -829,7 +840,7 @@ def to_dict(self): return out - def execute(self, ignore_cache=False, raise_on_error=True): + def execute(self, ignore_cache: Any = False, raise_on_error: Any = True) -> Any: """ Execute the multi search request and return a list of search results. 
""" diff --git a/opensearchpy/helpers/search.pyi b/opensearchpy/helpers/search.pyi deleted file mode 100644 index 49eecb0d..00000000 --- a/opensearchpy/helpers/search.pyi +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .aggs import AggBase -from .utils import DslBase - -class QueryProxy(object): ... -class ProxyDescriptor(object): ... -class AggsProxy(AggBase, DslBase): ... -class Request(object): ... -class Search(Request): ... -class MultiSearch(Request): ... diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py index ad6e9c65..930b8d25 100644 --- a/opensearchpy/helpers/signer.py +++ b/opensearchpy/helpers/signer.py @@ -8,16 +8,11 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -import sys from typing import Any, Callable, Dict +from urllib.parse import parse_qs, urlencode, urlparse import requests -PY3 = sys.version_info[0] == 3 - -if PY3: - from urllib.parse import parse_qs, urlencode, urlparse - class AWSV4Signer: """ @@ -81,7 +76,7 @@ class RequestsAWSV4SignerAuth(requests.auth.AuthBase): AWS V4 Request Signer for Requests. 
""" - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore self.signer = AWSV4Signer(credentials, region, service) def __call__(self, request): # type: ignore @@ -136,7 +131,7 @@ class AWSV4SignerAuth(RequestsAWSV4SignerAuth): class Urllib3AWSV4SignerAuth(Callable): # type: ignore - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore self.signer = AWSV4Signer(credentials, region, service) def __call__(self, method: str, url: str, body: Any) -> Dict[str, str]: diff --git a/opensearchpy/helpers/test.py b/opensearchpy/helpers/test.py index e2467584..96282c49 100644 --- a/opensearchpy/helpers/test.py +++ b/opensearchpy/helpers/test.py @@ -30,18 +30,17 @@ import os import time +from typing import Any, Tuple from unittest import SkipTest, TestCase +import opensearchpy.client from opensearchpy import OpenSearch from opensearchpy.exceptions import ConnectionError -if "OPENSEARCH_URL" in os.environ: - OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] -else: - OPENSEARCH_URL = "https://admin:admin@localhost:9200" +OPENSEARCH_URL = os.environ.get("OPENSEARCH_URL", "https://admin:admin@localhost:9200") -def get_test_client(nowait=False, **kwargs): +def get_test_client(nowait: bool = False, **kwargs: Any) -> OpenSearch: # construct kwargs from the environment kw = {"timeout": 30} @@ -69,14 +68,14 @@ def get_test_client(nowait=False, **kwargs): class OpenSearchTestCase(TestCase): @staticmethod - def _get_client(): + def _get_client() -> OpenSearch: return get_test_client() @classmethod - def setup_class(cls): + def setup_class(cls) -> None: cls.client = cls._get_client() - def teardown_method(self, _): + def teardown_method(self, _: Any) -> None: # Hidden indices expanded in wildcards in OpenSearch 7.7 expand_wildcards = ["open", "closed"] if self.opensearch_version() >= (1, 0): @@ -87,20 +86,20 @@ def teardown_method(self, _): ) self.client.indices.delete_template(name="*", ignore=404) - def opensearch_version(self): + def opensearch_version(self) -> Tuple[int, ...]: if not hasattr(self, "_opensearch_version"): self._opensearch_version = opensearch_version(self.client) return self._opensearch_version -def _get_version(version_string): +def _get_version(version_string: str) -> Tuple[int, ...]: if "." not in version_string: return () version = version_string.strip().split(".") return tuple(int(v) if v.isdigit() else 999 for v in version) -def opensearch_version(client): +def opensearch_version(client: opensearchpy.client.OpenSearch) -> Tuple[int, int, int]: return _get_version(client.info()["version"]["number"]) diff --git a/opensearchpy/helpers/test.pyi b/opensearchpy/helpers/test.pyi deleted file mode 100644 index a4d2302a..00000000 --- a/opensearchpy/helpers/test.pyi +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. 
licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Tuple -from unittest import TestCase - -from ..client import OpenSearch - -OPENSEARCH_URL: str - -def get_test_client(nowait: bool = ..., **kwargs: Any) -> OpenSearch: ... -def _get_version(version_string: str) -> Tuple[int, ...]: ... - -class OpenSearchTestCase(TestCase): - @staticmethod - def _get_client() -> OpenSearch: ... - @classmethod - def setup_class(cls) -> None: ... - def teardown_method(self, _: Any) -> None: ... - def opensearch_version(self) -> Tuple[int, ...]: ... diff --git a/opensearchpy/helpers/update_by_query.py b/opensearchpy/helpers/update_by_query.py index 32c7b705..7b560216 100644 --- a/opensearchpy/helpers/update_by_query.py +++ b/opensearchpy/helpers/update_by_query.py @@ -25,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy.connection.connections import get_connection from ..helpers.query import Bool, Q @@ -36,7 +38,7 @@ class UpdateByQuery(Request): query = ProxyDescriptor("query") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Update by query request to opensearch. @@ -50,17 +52,17 @@ def __init__(self, **kwargs): """ super(UpdateByQuery, self).__init__(**kwargs) self._response_class = UpdateByQueryResponse - self._script = {} + self._script: Any = {} self._query_proxy = QueryProxy(self, "query") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `UpdateByQuery` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -81,7 +83,7 @@ def from_dict(cls, d): u.update_from_dict(d) return u - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -94,7 +96,7 @@ def _clone(self): ubq.query._proxied = self.query._proxied return ubq - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -102,7 +104,7 @@ def response_class(self, cls): ubq._response_class = cls return ubq - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "UpdateByQuery": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. 
@@ -115,7 +117,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script(self, **kwargs): + def script(self, **kwargs: Any) -> Any: """ Define update action to take: @@ -136,7 +138,7 @@ def script(self, **kwargs): ubq._script.update(kwargs) return ubq - def to_dict(self, **kwargs): + def to_dict(self, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request'ubq body. @@ -154,7 +156,7 @@ def to_dict(self, **kwargs): d.update(recursive_to_dict(kwargs)) return d - def execute(self): + def execute(self) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. diff --git a/opensearchpy/helpers/update_by_query.pyi b/opensearchpy/helpers/update_by_query.pyi deleted file mode 100644 index c0baf631..00000000 --- a/opensearchpy/helpers/update_by_query.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .search import Request - -class UpdateByQuery(Request): ... 
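For context, the `UpdateByQuery` methods annotated above form a chainable request object: `filter()` wraps its condition in a `Bool(filter=[...])` query and `script()` merges its keyword arguments into the request's `script` clause. A short usage sketch follows; the client URL, the `movies` index, and the Painless script are illustrative assumptions, not part of this patch.

```python
from opensearchpy import OpenSearch, UpdateByQuery

# Hypothetical cluster and index; the script payload is an example.
client = OpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    use_ssl=True,
    verify_certs=False,
)

ubq = (
    UpdateByQuery(using=client, index="movies")
    .filter("term", director="nolan")
    .script(source="ctx._source.views += params.n", params={"n": 1})
)
response = ubq.execute()
print(response.updated)  # count of documents the query updated
```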
diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index 4cd9dad8..e17b89a6 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -27,19 +27,16 @@ from __future__ import unicode_literals -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from copy import copy +from typing import Any, Callable, Dict, Optional, Tuple from six import add_metaclass, iteritems from six.moves import map from opensearchpy.exceptions import UnknownDslObject, ValidationException -SKIP_VALUES = ("", None) +SKIP_VALUES: Tuple[str, None] = ("", None) EXPAND__TO_DOT = True DOC_META_FIELDS = frozenset( @@ -62,7 +59,7 @@ ).union(DOC_META_FIELDS) -def _wrap(val, obj_wrapper=None): +def _wrap(val: Any, obj_wrapper: Optional[Callable[..., Any]] = None) -> Any: if isinstance(val, collections_abc.Mapping): return AttrDict(val) if obj_wrapper is None else obj_wrapper(val) if isinstance(val, list): @@ -71,52 +68,54 @@ def _wrap(val, obj_wrapper=None): class AttrList(object): - def __init__(self, p, obj_wrapper=None): + def __init__( + self, p: Any, obj_wrapper: Optional[Callable[..., Any]] = None + ) -> None: # make iterables into lists if not isinstance(p, list): p = list(p) self._l_ = p self._obj_wrapper = obj_wrapper - def __repr__(self): + def __repr__(self) -> str: return repr(self._l_) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, AttrList): - return other._l_ == self._l_ + return bool(other._l_ == self._l_) # make sure we still equal to a dict with the same data - return other == self._l_ + return bool(other == self._l_) - def __ne__(self, other): - return not self == other + def __ne__(self, other: Any) -> bool: + return bool(not self == other) - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: p = self._l_[k] if isinstance(k, slice): return AttrList(p, obj_wrapper=self._obj_wrapper) return _wrap(p, self._obj_wrapper) - def __setitem__(self, k, value): + def __setitem__(self, k: Any, value: Any) -> None: self._l_[k] = value - def __iter__(self): + def __iter__(self) -> Any: return map(lambda i: _wrap(i, self._obj_wrapper), self._l_) - def __len__(self): + def __len__(self) -> int: return len(self._l_) - def __nonzero__(self): + def __nonzero__(self) -> bool: return bool(self._l_) __bool__ = __nonzero__ - def __getattr__(self, name): + def __getattr__(self, name: Any) -> Any: return getattr(self._l_, name) - def __getstate__(self): + def __getstate__(self) -> Any: return self._l_, self._obj_wrapper - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: self._l_, self._obj_wrapper = state @@ -127,44 +126,44 @@ class AttrDict(object): nested dsl dicts. 
""" - def __init__(self, d): + def __init__(self, d: Any) -> None: # assign the inner dict manually to prevent __setattr__ from firing super(AttrDict, self).__setattr__("_d_", d) - def __contains__(self, key): + def __contains__(self, key: Any) -> bool: return key in self._d_ - def __nonzero__(self): + def __nonzero__(self) -> bool: return bool(self._d_) __bool__ = __nonzero__ - def __dir__(self): + def __dir__(self) -> Any: # introspection for auto-complete in IPython etc return list(self._d_.keys()) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, AttrDict): - return other._d_ == self._d_ + return bool(other._d_ == self._d_) # make sure we still equal to a dict with the same data - return other == self._d_ + return bool(other == self._d_) - def __ne__(self, other): - return not self == other + def __ne__(self, other: Any) -> bool: + return bool(not self == other) - def __repr__(self): + def __repr__(self) -> str: r = repr(self._d_) if len(r) > 60: r = r[:60] + "...}" return r - def __getstate__(self): + def __getstate__(self) -> Any: return (self._d_,) - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("_d_", state[0]) - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: try: return self.__getitem__(attr_name) except KeyError: @@ -174,7 +173,7 @@ def __getattr__(self, attr_name): ) ) - def get(self, key, default=None): + def get(self, key: Any, default: Any = None) -> Any: try: return self.__getattr__(key) except AttributeError: @@ -182,7 +181,7 @@ def get(self, key, default=None): return default raise - def __delattr__(self, attr_name): + def __delattr__(self, attr_name: Any) -> None: try: del self._d_[attr_name] except KeyError: @@ -192,26 +191,26 @@ def __delattr__(self, attr_name): ) ) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return _wrap(self._d_[key]) - def __setitem__(self, key, value): + def __setitem__(self, key: Any, value: Any) -> None: self._d_[key] = value - def __delitem__(self, key): + def __delitem__(self, key: Any) -> None: del self._d_[key] - def __setattr__(self, name, value): + def __setattr__(self, name: Any, value: Any) -> None: if name in self._d_ or not hasattr(self.__class__, name): self._d_[name] = value else: # there is an attribute on the class (could be property, ..) - don't add it as field super(AttrDict, self).__setattr__(name, value) - def __iter__(self): + def __iter__(self) -> Any: return iter(self._d_) - def to_dict(self): + def to_dict(self) -> Any: return self._d_ @@ -228,9 +227,10 @@ class DslMeta(type): For typical use see `QueryMeta` and `Query` in `opensearchpy.query`. """ - _types = {} + _types: Dict[str, Any] = {} - def __init__(cls, name, bases, attrs): + def __init__(cls: Any, name: str, bases: Any, attrs: Any) -> None: + # TODO: why is it calling itself?! 
super(DslMeta, cls).__init__(name, bases, attrs) # skip for DslBase if not hasattr(cls, "_type_shortcut"): @@ -246,7 +246,7 @@ def __init__(cls, name, bases, attrs): cls._classes[cls.name] = cls @classmethod - def get_dsl_type(cls, name): + def get_dsl_type(cls, name: Any) -> Any: try: return cls._types[name] except KeyError: @@ -270,10 +270,11 @@ class DslBase(object): all values in the `must` attribute into Query objects) """ - _param_defs = {} + _param_defs: Dict[str, Any] = {} + _params: Dict[str, Any] @classmethod - def get_dsl_class(cls, name, default=None): + def get_dsl_class(cls: Any, name: Any, default: Optional[bool] = None) -> Any: try: return cls._classes[name] except KeyError: @@ -283,14 +284,14 @@ def get_dsl_class(cls, name, default=None): "DSL class `{}` does not exist in {}.".format(name, cls._type_name) ) - def __init__(self, _expand__to_dot=EXPAND__TO_DOT, **params): + def __init__(self, _expand__to_dot: bool = EXPAND__TO_DOT, **params: Any) -> None: self._params = {} for pname, pvalue in iteritems(params): if "__" in pname and _expand__to_dot: pname = pname.replace("__", ".") self._setattr(pname, pvalue) - def _repr_params(self): + def _repr_params(self) -> str: """Produce a repr of all our parameters to be used in __repr__.""" return ", ".join( "{}={!r}".format(n.replace(".", "__"), v) @@ -299,21 +300,21 @@ def _repr_params(self): if "type" not in self._param_defs.get(n, {}) or v ) - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format(self.__class__.__name__, self._repr_params()) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and other.to_dict() == self.to_dict() - def __ne__(self, other): + def __ne__(self, other: Any) -> bool: return not self == other - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: Optional[bool]) -> None: if name.startswith("_"): return super(DslBase, self).__setattr__(name, value) return self._setattr(name, value) - def _setattr(self, name, value): + def _setattr(self, name: Any, value: Any) -> None: # if this attribute has special type assigned to it... 
if name in self._param_defs: pinfo = self._param_defs[name] @@ -343,7 +344,7 @@ def _setattr(self, name, value): value = shortcut(value) self._params[name] = value - def __getattr__(self, name): + def __getattr__(self, name: str) -> Any: if name.startswith("_"): raise AttributeError( "{!r} object has no attribute {!r}".format( @@ -375,7 +376,7 @@ def __getattr__(self, name): return AttrDict(value) return value - def to_dict(self): + def to_dict(self) -> Any: """ Serialize the DSL object to plain dict """ @@ -414,7 +415,7 @@ def to_dict(self): d[pname] = value return {self.name: d} - def _clone(self): + def _clone(self) -> Any: c = self.__class__() for attr in self._params: c._params[attr] = copy(self._params[attr]) @@ -422,7 +423,9 @@ def _clone(self): class HitMeta(AttrDict): - def __init__(self, document, exclude=("_source", "_fields")): + def __init__( + self, document: Dict[str, Any], exclude: Any = ("_source", "_fields") + ) -> None: d = { k[1:] if k.startswith("_") else k: v for (k, v) in iteritems(document) @@ -435,7 +438,7 @@ def __init__(self, document, exclude=("_source", "_fields")): class ObjectBase(AttrDict): - def __init__(self, meta=None, **kwargs): + def __init__(self, meta: Any = None, **kwargs: Any) -> None: meta = meta or {} for k in list(kwargs): if k.startswith("_") and k[1:] in META_FIELDS: @@ -446,7 +449,7 @@ def __init__(self, meta=None, **kwargs): super(ObjectBase, self).__init__(kwargs) @classmethod - def __list_fields(cls): + def __list_fields(cls: Any) -> Any: """ Get all the fields defined for our class, if we have an Index, try looking at the index mappings as well, mark the fields from Index as @@ -467,7 +470,7 @@ def __list_fields(cls): yield name, field, True @classmethod - def __get_field(cls, name): + def __get_field(cls: Any, name: Any) -> Any: try: return cls._doc_type.mapping[name] except KeyError: @@ -479,30 +482,30 @@ def __get_field(cls, name): pass @classmethod - def from_opensearch(cls, hit): + def from_opensearch(cls: Any, hit: Any) -> Any: meta = hit.copy() data = meta.pop("_source", {}) doc = cls(meta=meta) doc._from_dict(data) return doc - def _from_dict(self, data): + def _from_dict(self, data: Any) -> None: for k, v in iteritems(data): f = self.__get_field(k) if f and f._coerce: v = f.deserialize(v) setattr(self, k, v) - def __getstate__(self): + def __getstate__(self) -> Any: return self.to_dict(), self.meta._d_ - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: data, meta = state super(AttrDict, self).__setattr__("_d_", {}) super(AttrDict, self).__setattr__("meta", HitMeta(meta)) self._from_dict(data) - def __getattr__(self, name): + def __getattr__(self, name: Any) -> Any: try: return super(ObjectBase, self).__getattr__(name) except AttributeError: @@ -515,7 +518,7 @@ def __getattr__(self, name): return value raise - def to_dict(self, skip_empty=True): + def to_dict(self, skip_empty: Optional[bool] = True) -> Any: out = {} for k, v in iteritems(self._d_): # if this is a mapped field, @@ -536,8 +539,8 @@ def to_dict(self, skip_empty=True): out[k] = v return out - def clean_fields(self): - errors = {} + def clean_fields(self) -> None: + errors: Dict[str, Any] = {} for name, field, optional in self.__list_fields(): data = self._d_.get(name, None) if data is None and optional: @@ -554,15 +557,15 @@ def clean_fields(self): if errors: raise ValidationException(errors) - def clean(self): + def clean(self) -> None: pass - def full_clean(self): + def full_clean(self) -> None: self.clean_fields() self.clean() -def 
merge(data, new_data, raise_on_conflict=False): +def merge(data: Any, new_data: Any, raise_on_conflict: bool = False) -> None: if not ( isinstance(data, (AttrDict, collections_abc.Mapping)) and isinstance(new_data, (AttrDict, collections_abc.Mapping)) @@ -573,6 +576,13 @@ def merge(data, new_data, raise_on_conflict=False): ) ) + if not isinstance(new_data, Dict): + raise ValueError( + "You can only merge two dicts! Got {!r} and {!r} instead.".format( + data, new_data + ) + ) + for key, value in iteritems(new_data): if ( key in data @@ -583,10 +593,10 @@ def merge(data, new_data, raise_on_conflict=False): elif key in data and data[key] != value and raise_on_conflict: raise ValueError("Incompatible data for key %r, cannot be merged." % key) else: - data[key] = value + data[key] = value # type: ignore -def recursive_to_dict(data): +def recursive_to_dict(data: Any) -> Any: """Recursively transform objects that potentially have .to_dict() into dictionary literals by traversing AttrList, AttrDict, list, tuple, and Mapping types. diff --git a/opensearchpy/helpers/utils.pyi b/opensearchpy/helpers/utils.pyi deleted file mode 100644 index decb7382..00000000 --- a/opensearchpy/helpers/utils.pyi +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -class AttrList(object): ... -class AttrDict(object): ... -class DslMeta(type): ... -class DslBase(object): ... -class HitMeta(AttrDict): ... -class ObjectBase(AttrDict): ... diff --git a/opensearchpy/helpers/wrappers.py b/opensearchpy/helpers/wrappers.py index 968909a6..1583391c 100644 --- a/opensearchpy/helpers/wrappers.py +++ b/opensearchpy/helpers/wrappers.py @@ -26,13 +26,12 @@ # under the License. import operator +from typing import Any from six import iteritems, string_types from .utils import AttrDict -__all__ = ["Range"] - class Range(AttrDict): OPS = { @@ -42,7 +41,7 @@ class Range(AttrDict): "gte": operator.ge, } - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: if args and (len(args) > 1 or kwargs or not isinstance(args[0], dict)): raise ValueError( "Range accepts a single dictionary or a set of keyword arguments." 
@@ -61,10 +60,10 @@ def __init__(self, *args, **kwargs): super(Range, self).__init__(args[0] if args else kwargs) - def __repr__(self): + def __repr__(self) -> str: return "Range(%s)" % ", ".join("%s=%r" % op for op in iteritems(self._d_)) - def __contains__(self, item): + def __contains__(self, item: Any) -> bool: if isinstance(item, string_types): return super(Range, self).__contains__(item) @@ -74,7 +73,7 @@ def __contains__(self, item): return True @property - def upper(self): + def upper(self) -> Any: if "lt" in self._d_: return self._d_["lt"], False if "lte" in self._d_: @@ -82,9 +81,12 @@ def upper(self): return None, False @property - def lower(self): + def lower(self) -> Any: if "gt" in self._d_: return self._d_["gt"], False if "gte" in self._d_: return self._d_["gte"], True return None, False + + +__all__ = ["Range"] diff --git a/opensearchpy/helpers/wrappers.pyi b/opensearchpy/helpers/wrappers.pyi deleted file mode 100644 index 704159dc..00000000 --- a/opensearchpy/helpers/wrappers.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import AttrDict - -class Range(AttrDict): ... diff --git a/opensearchpy/plugins/__init__.pyi b/opensearchpy/plugins/__init__.pyi deleted file mode 100644 index 22c54ac8..00000000 --- a/opensearchpy/plugins/__init__.pyi +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. diff --git a/opensearchpy/plugins/alerting.py b/opensearchpy/plugins/alerting.py index 66b759a4..02c6b1a1 100644 --- a/opensearchpy/plugins/alerting.py +++ b/opensearchpy/plugins/alerting.py @@ -9,12 +9,14 @@ # GitHub history for details. +from typing import Any + from ..client.utils import NamespacedClient, _make_path, query_params class AlertingClient(NamespacedClient): @query_params() - def search_monitor(self, body, params=None, headers=None): + def search_monitor(self, body: Any, params: Any = None, headers: Any = None) -> Any: """ Returns the search result for a monitor. 
@@ -29,7 +31,9 @@ def search_monitor(self, body, params=None, headers=None): ) @query_params() - def get_monitor(self, monitor_id, params=None, headers=None): + def get_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Returns the details of a specific monitor. @@ -43,7 +47,9 @@ def get_monitor(self, monitor_id, params=None, headers=None): ) @query_params("dryrun") - def run_monitor(self, monitor_id, params=None, headers=None): + def run_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Runs/Executes a specific monitor. @@ -58,7 +64,9 @@ def run_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - def create_monitor(self, body=None, params=None, headers=None): + def create_monitor( + self, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates a monitor with inputs, triggers, and actions. @@ -73,7 +81,9 @@ def create_monitor(self, body=None, params=None, headers=None): ) @query_params() - def update_monitor(self, monitor_id, body=None, params=None, headers=None): + def update_monitor( + self, monitor_id: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates a monitor's inputs, triggers, and actions. @@ -89,7 +99,9 @@ def update_monitor(self, monitor_id, body=None, params=None, headers=None): ) @query_params() - def delete_monitor(self, monitor_id, params=None, headers=None): + def delete_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes a specific monitor. @@ -103,7 +115,9 @@ def delete_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - def get_destination(self, destination_id=None, params=None, headers=None): + def get_destination( + self, destination_id: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Returns the details of a specific destination. @@ -119,7 +133,9 @@ def get_destination(self, destination_id=None, params=None, headers=None): ) @query_params() - def create_destination(self, body=None, params=None, headers=None): + def create_destination( + self, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates a destination for slack, mail, or custom-webhook. @@ -134,7 +150,13 @@ def create_destination(self, body=None, params=None, headers=None): ) @query_params() - def update_destination(self, destination_id, body=None, params=None, headers=None): + def update_destination( + self, + destination_id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a destination's inputs, triggers, and actions. @@ -150,7 +172,9 @@ def update_destination(self, destination_id, body=None, params=None, headers=Non ) @query_params() - def delete_destination(self, destination_id, params=None, headers=None): + def delete_destination( + self, destination_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes a specific destination. @@ -164,7 +188,7 @@ def delete_destination(self, destination_id, params=None, headers=None): ) @query_params() - def get_alerts(self, params=None, headers=None): + def get_alerts(self, params: Any = None, headers: Any = None) -> Any: """ Returns all alerts. 
@@ -177,7 +201,9 @@ def get_alerts(self, params=None, headers=None): ) @query_params() - def acknowledge_alert(self, monitor_id, body=None, params=None, headers=None): + def acknowledge_alert( + self, monitor_id: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Acknowledges an alert. diff --git a/opensearchpy/plugins/alerting.pyi b/opensearchpy/plugins/alerting.pyi deleted file mode 100644 index 4454bf3c..00000000 --- a/opensearchpy/plugins/alerting.pyi +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class AlertingClient(NamespacedClient): - def search_monitor( - self, body: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def run_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def create_monitor( - self, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def update_monitor( - self, - monitor_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_destination( - self, - destination_id: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def create_destination( - self, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def update_destination( - self, - destination_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_destination( - self, destination_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_alerts( - self, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def acknowledge_alert( - self, - monitor_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/plugins/index_management.py b/opensearchpy/plugins/index_management.py index 9f4f5a6e..77a31279 100644 --- a/opensearchpy/plugins/index_management.py +++ b/opensearchpy/plugins/index_management.py @@ -9,12 +9,16 @@ # GitHub history for details. +from typing import Any + from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndexManagementClient(NamespacedClient): @query_params() - def put_policy(self, policy, body=None, params=None, headers=None): + def put_policy( + self, policy: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates, or updates, a policy. 
@@ -32,7 +36,9 @@ def put_policy(self, policy, body=None, params=None, headers=None): ) @query_params() - def add_policy(self, index, body=None, params=None, headers=None): + def add_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Adds a policy to an index. This operation does not change the policy if the index already has one. @@ -50,7 +56,7 @@ def add_policy(self, index, body=None, params=None, headers=None): ) @query_params() - def get_policy(self, policy, params=None, headers=None): + def get_policy(self, policy: Any, params: Any = None, headers: Any = None) -> Any: """ Gets the policy by `policy_id`. @@ -67,7 +73,9 @@ def get_policy(self, policy, params=None, headers=None): ) @query_params() - def remove_policy_from_index(self, index, params=None, headers=None): + def remove_policy_from_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Removes any ISM policy from the index. @@ -84,7 +92,9 @@ def remove_policy_from_index(self, index, params=None, headers=None): ) @query_params() - def change_policy(self, index, body=None, params=None, headers=None): + def change_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates the managed index policy to a new policy (or to a new version of the policy). @@ -102,7 +112,9 @@ def change_policy(self, index, body=None, params=None, headers=None): ) @query_params() - def retry(self, index, body=None, params=None, headers=None): + def retry( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Retries the failed action for an index. @@ -120,7 +132,7 @@ def retry(self, index, body=None, params=None, headers=None): ) @query_params("show_policy") - def explain_index(self, index, params=None, headers=None): + def explain_index(self, index: Any, params: Any = None, headers: Any = None) -> Any: """ Gets the current state of the index. @@ -137,7 +149,9 @@ def explain_index(self, index, params=None, headers=None): ) @query_params() - def delete_policy(self, policy, params=None, headers=None): + def delete_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes the policy by `policy_id`. diff --git a/opensearchpy/plugins/index_management.pyi b/opensearchpy/plugins/index_management.pyi deleted file mode 100644 index d4a6dbad..00000000 --- a/opensearchpy/plugins/index_management.pyi +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class IndexManagementClient(NamespacedClient): - def put_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def add_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def get_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... 
- def remove_policy_from_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def change_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def retry( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def explain_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/serializer.py b/opensearchpy/serializer.py index 7463dbcc..e8c87ba9 100644 --- a/opensearchpy/serializer.py +++ b/opensearchpy/serializer.py @@ -26,10 +26,12 @@ # under the License. +from typing import Any, Dict, Optional + try: import simplejson as json except ImportError: - import json + import json # type: ignore import uuid from datetime import date, datetime @@ -45,22 +47,22 @@ class Serializer(object): - mimetype = "" + mimetype: str = "" - def loads(self, s): + def loads(self, s: str) -> Any: raise NotImplementedError() - def dumps(self, data): + def dumps(self, data: Any) -> Any: raise NotImplementedError() class TextSerializer(Serializer): - mimetype = "text/plain" + mimetype: str = "text/plain" - def loads(self, s): + def loads(self, s: str) -> Any: return s - def dumps(self, data): + def dumps(self, data: Any) -> Any: if isinstance(data, string_types): return data @@ -68,9 +70,9 @@ def dumps(self, data): class JSONSerializer(Serializer): - mimetype = "application/json" + mimetype: str = "application/json" - def default(self, data): + def default(self, data: Any) -> Any: if isinstance(data, TIME_TYPES): # Little hack to avoid importing pandas but to not # return 'NaT' string for pd.NaT as that's not a valid @@ -142,13 +144,13 @@ def default(self, data): raise TypeError("Unable to serialize %r (type: %s)" % (data, type(data))) - def loads(self, s): + def loads(self, s: str) -> Any: try: return json.loads(s) except (ValueError, TypeError) as e: raise SerializationError(s, e) - def dumps(self, data): + def dumps(self, data: Any) -> Any: # don't serialize strings if isinstance(data, string_types): return data @@ -161,14 +163,18 @@ def dumps(self, data): raise SerializationError(data, e) -DEFAULT_SERIALIZERS = { +DEFAULT_SERIALIZERS: Dict[str, Serializer] = { JSONSerializer.mimetype: JSONSerializer(), TextSerializer.mimetype: TextSerializer(), } class Deserializer(object): - def __init__(self, serializers, default_mimetype="application/json"): + def __init__( + self, + serializers: Dict[str, Serializer], + default_mimetype: str = "application/json", + ) -> None: try: self.default = serializers[default_mimetype] except KeyError: @@ -177,7 +183,7 @@ def __init__(self, serializers, default_mimetype="application/json"): ) self.serializers = serializers - def loads(self, s, mimetype=None): + def loads(self, s: str, mimetype: Optional[str] = None) -> Any: if not mimetype: deserializer = self.default else: @@ -199,7 +205,7 @@ def loads(self, s, mimetype=None): class AttrJSONSerializer(JSONSerializer): - def default(self, data): + def default(self, data: Any) -> Any: if isinstance(data, AttrList): return data._l_ if hasattr(data, "to_dict"): diff --git a/opensearchpy/serializer.pyi 
b/opensearchpy/serializer.pyi deleted file mode 100644 index 6d798cce..00000000 --- a/opensearchpy/serializer.pyi +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Dict, Optional - -class Serializer(object): - mimetype: str - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -class TextSerializer(Serializer): - mimetype: str - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -class JSONSerializer(Serializer): - mimetype: str - def default(self, data: Any) -> Any: ... - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -DEFAULT_SERIALIZERS: Dict[str, Serializer] - -class Deserializer(object): - def __init__( - self, - serializers: Dict[str, Serializer], - default_mimetype: str = ..., - ) -> None: ... - def loads(self, s: str, mimetype: Optional[str] = ...) -> Any: ... - -class AttrJSONSerializer(JSONSerializer): ... diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index c36178b2..583d9ba7 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -28,8 +28,9 @@ import time from itertools import chain +from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union -from .connection import Urllib3HttpConnection +from .connection import Connection, Urllib3HttpConnection from .connection_pool import ConnectionPool, DummyConnectionPool, EmptyConnectionPool from .exceptions import ( ConnectionError, @@ -37,10 +38,12 @@ SerializationError, TransportError, ) -from .serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer +from .serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer, Serializer -def get_host_info(node_info, host): +def get_host_info( + node_info: Dict[str, Any], host: Optional[Dict[str, Any]] +) -> Optional[Dict[str, Any]]: """ Simple callback that takes the node info from `/_cluster/nodes` and a parsed connection information and return the connection information. If @@ -68,28 +71,50 @@ class Transport(object): Main interface is the `perform_request` method. 
""" - DEFAULT_CONNECTION_CLASS = Urllib3HttpConnection + DEFAULT_CONNECTION_CLASS: Type[Connection] = Urllib3HttpConnection + + connection_pool: Any + deserializer: Deserializer + + max_retries: int + retry_on_timeout: bool + retry_on_status: Collection[int] + send_get_body_as: str + serializer: Serializer + connection_pool_class: Any + connection_class: Type[Connection] + kwargs: Any + hosts: Any + seed_connections: List[Connection] + sniffer_timeout: Optional[float] + sniff_on_start: bool + sniff_on_connection_fail: bool + last_sniff: float + sniff_timeout: Optional[float] + host_info_callback: Any def __init__( self, - hosts, - connection_class=None, - connection_pool_class=ConnectionPool, - host_info_callback=get_host_info, - sniff_on_start=False, - sniffer_timeout=None, - sniff_timeout=0.1, - sniff_on_connection_fail=False, - serializer=JSONSerializer(), - serializers=None, - default_mimetype="application/json", - max_retries=3, - pool_maxsize=None, - retry_on_status=(502, 503, 504), - retry_on_timeout=False, - send_get_body_as="GET", - **kwargs - ): + hosts: Any, + connection_class: Optional[Type[Connection]] = None, + connection_pool_class: Type[ConnectionPool] = ConnectionPool, + host_info_callback: Callable[ + [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] + ] = get_host_info, + sniff_on_start: bool = False, + sniffer_timeout: Optional[float] = None, + sniff_timeout: float = 0.1, + sniff_on_connection_fail: bool = False, + serializer: Serializer = JSONSerializer(), + serializers: Optional[Mapping[str, Serializer]] = None, + default_mimetype: str = "application/json", + max_retries: int = 3, + pool_maxsize: Optional[int] = None, + retry_on_status: Collection[int] = (502, 503, 504), + retry_on_timeout: bool = False, + send_get_body_as: str = "GET", + **kwargs: Any + ) -> None: """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance @@ -185,7 +210,7 @@ def __init__( if sniff_on_start: self.sniff_hosts(True) - def add_connection(self, host): + def add_connection(self, host: Any) -> None: """ Create a new :class:`~opensearchpy.Connection` instance and add it to the pool. @@ -194,7 +219,7 @@ def add_connection(self, host): self.hosts.append(host) self.set_connections(self.hosts) - def set_connections(self, hosts): + def set_connections(self, hosts: Any) -> None: """ Instantiate all the connections and create new connection pool to hold them. Tries to identify unchanged hosts and re-use existing @@ -204,7 +229,7 @@ def set_connections(self, hosts): """ # construct the connections - def _create_connection(host): + def _create_connection(host: Any) -> Any: # if this is not the initial setup look at the existing connection # options and identify connections that haven't changed and can be # kept around. @@ -220,9 +245,7 @@ def _create_connection(host): kwargs["pool_maxsize"] = self.pool_maxsize return self.connection_class(**kwargs) - connections = map(_create_connection, hosts) - - connections = list(zip(connections, hosts)) + connections = list(zip(map(_create_connection, hosts), hosts)) if len(connections) == 1: self.connection_pool = DummyConnectionPool(connections) else: @@ -231,7 +254,7 @@ def _create_connection(host): connections, **self.kwargs ) - def get_connection(self): + def get_connection(self) -> Any: """ Retrieve a :class:`~opensearchpy.Connection` instance from the :class:`~opensearchpy.ConnectionPool` instance. 
@@ -241,7 +264,7 @@ def get_connection(self): self.sniff_hosts() return self.connection_pool.get_connection() - def _get_sniff_data(self, initial=False): + def _get_sniff_data(self, initial: bool = False) -> Any: """ Perform the request to get sniffing information. Returns a list of dictionaries (one per node) containing all the information from the @@ -289,7 +312,7 @@ def _get_sniff_data(self, initial=False): return list(node_info["nodes"].values()) - def _get_host_info(self, host_info): + def _get_host_info(self, host_info: Any) -> Any: host = {} address = host_info.get("http", {}).get("publish_address") @@ -310,7 +333,7 @@ def _get_host_info(self, host_info): return self.host_info_callback(host_info, host) - def sniff_hosts(self, initial=False): + def sniff_hosts(self, initial: bool = False) -> Any: """ Obtain a list of nodes from the cluster and create a new connection pool using the information retrieved. @@ -322,7 +345,7 @@ def sniff_hosts(self, initial=False): """ node_info = self._get_sniff_data(initial) - hosts = list(filter(None, (self._get_host_info(n) for n in node_info))) + hosts: Any = list(filter(None, (self._get_host_info(n) for n in node_info))) # we weren't able to get any nodes or host_info_callback blocked all - # raise error. @@ -333,7 +356,7 @@ def sniff_hosts(self, initial=False): self.set_connections(hosts) - def mark_dead(self, connection): + def mark_dead(self, connection: Connection) -> None: """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. @@ -345,7 +368,16 @@ def mark_dead(self, connection): if self.sniff_on_connection_fail: self.sniff_hosts() - def perform_request(self, method, url, headers=None, params=None, body=None): + def perform_request( + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: """ Perform the actual request. Retrieve a connection from the connection pool, pass all the information to its perform_request method and @@ -428,13 +460,13 @@ def perform_request(self, method, url, headers=None, params=None, body=None): ) return data - def close(self): + def close(self) -> Any: """ Explicitly closes connections """ - self.connection_pool.close() + return self.connection_pool.close() - def _resolve_request_args(self, method, params, body): + def _resolve_request_args(self, method: str, params: Any, body: Any) -> Any: """Resolves parameters for .perform_request()""" if body is not None: body = self.serializer.dumps(body) @@ -470,3 +502,6 @@ def _resolve_request_args(self, method, params, body): ignore = (ignore,) return method, params, body, ignore, timeout + + +__all__ = ["TransportError"] diff --git a/opensearchpy/transport.pyi b/opensearchpy/transport.pyi deleted file mode 100644 index fe33cfda..00000000 --- a/opensearchpy/transport.pyi +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union - -from .connection import Connection -from .connection_pool import ConnectionPool -from .serializer import Deserializer, Serializer - -def get_host_info( - node_info: Dict[str, Any], host: Optional[Dict[str, Any]] -) -> Optional[Dict[str, Any]]: ... - -class Transport(object): - DEFAULT_CONNECTION_CLASS: Type[Connection] - connection_pool: ConnectionPool - deserializer: Deserializer - - max_retries: int - retry_on_timeout: bool - retry_on_status: Collection[int] - send_get_body_as: str - serializer: Serializer - connection_pool_class: Type[ConnectionPool] - connection_class: Type[Connection] - kwargs: Any - hosts: Optional[List[Dict[str, Any]]] - seed_connections: List[Connection] - sniffer_timeout: Optional[float] - sniff_on_start: bool - sniff_on_connection_fail: bool - last_sniff: float - sniff_timeout: Optional[float] - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] - ] - def __init__( - self, - hosts: Any, - connection_class: Optional[Type[Any]] = ..., - connection_pool_class: Type[ConnectionPool] = ..., - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] - ] = ..., - sniff_on_start: bool = ..., - sniffer_timeout: Optional[float] = ..., - sniff_timeout: float = ..., - sniff_on_connection_fail: bool = ..., - serializer: Serializer = ..., - serializers: Optional[Mapping[str, Serializer]] = ..., - default_mimetype: str = ..., - max_retries: int = ..., - retry_on_status: Collection[int] = ..., - retry_on_timeout: bool = ..., - send_get_body_as: str = ..., - **kwargs: Any - ) -> None: ... - def add_connection(self, host: Any) -> None: ... - def set_connections(self, hosts: Collection[Any]) -> None: ... - def get_connection(self) -> Connection: ... - def sniff_hosts(self, initial: bool = ...) -> None: ... - def mark_dead(self, connection: Connection) -> None: ... - def perform_request( - self, - method: str, - url: str, - headers: Optional[Mapping[str, str]] = ..., - params: Optional[Mapping[str, Any]] = ..., - body: Optional[Any] = ..., - ) -> Union[bool, Any]: ... - def close(self) -> None: ... 
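With both `serializer.pyi` and `transport.pyi` removed, type checkers now read the inline annotations instead. A minimal sketch (not part of the patch) of a serializer round trip using the `Serializer`/`Deserializer` types exactly as annotated above:

```python
from opensearchpy.serializer import (
    DEFAULT_SERIALIZERS,
    Deserializer,
    JSONSerializer,
)

serializer = JSONSerializer()               # mimetype: str = "application/json"
payload = serializer.dumps({"answer": 42})  # dumps(data: Any) -> Any

# Deserializer(serializers: Dict[str, Serializer], default_mimetype: str = ...)
deserializer = Deserializer(DEFAULT_SERIALIZERS)
doc = deserializer.loads(payload, mimetype="application/json")
assert doc == {"answer": 42}
```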
diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index 96d7d742..562f82e2 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -18,7 +18,7 @@ # urllib3.disable_warnings() -def test_opensearch_examples(): +def test_opensearch_examples() -> None: # Set up client = OpenSearch( hosts=["https://localhost:9200"], diff --git a/setup.py b/setup.py index f4163840..b608990e 100644 --- a/setup.py +++ b/setup.py @@ -32,12 +32,13 @@ from setuptools import find_packages, setup package_name = "opensearch-py" +package_version = "" base_dir = abspath(dirname(__file__)) with open(join(base_dir, package_name.replace("-", ""), "_version.py")) as f: - package_version = re.search( - r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read() - ).group(1) + m = re.search(r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read()) + if m: + package_version = m.group(1) with open(join(base_dir, "README.md")) as f: long_description = f.read().strip() @@ -90,7 +91,7 @@ "Issue Tracker": "https://github.com/opensearch-project/opensearch-py/issues", }, packages=packages, - package_data={"opensearchpy": ["py.typed", "*.pyi"]}, + package_data={"opensearchpy": ["py.typed"]}, include_package_data=True, zip_safe=False, classifiers=[ diff --git a/test_opensearchpy/TestHttpServer.py b/test_opensearchpy/TestHttpServer.py index d9fb8ede..ba83e041 100644 --- a/test_opensearchpy/TestHttpServer.py +++ b/test_opensearchpy/TestHttpServer.py @@ -41,18 +41,18 @@ def do_GET(self): class TestHTTPServer(HTTPServer): __test__ = False - def __init__(self, host="localhost", port=8080): + def __init__(self, host: str = "localhost", port: int = 8080) -> None: super().__init__((host, port), TestHTTPRequestHandler) self._server_thread = None - def start(self): + def start(self) -> None: if self._server_thread is not None: return self._server_thread = threading.Thread(target=self.serve_forever) self._server_thread.start() - def stop(self): + def stop(self) -> None: if self._server_thread is None: return self.socket.close() diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index 55f1e586..de93adc7 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -39,7 +39,7 @@ from os.path import abspath, dirname, exists, join, pardir -def fetch_opensearch_repo(): +def fetch_opensearch_repo() -> None: # user is manually setting YAML dir, don't tamper with it if "TEST_OPENSEARCH_YAML_DIR" in environ: return @@ -88,7 +88,7 @@ def fetch_opensearch_repo(): subprocess.check_call("cd %s && git fetch origin %s" % (repo_path, sha), shell=True) -def run_all(argv=None): +def run_all(argv: None = None) -> None: sys.exitfunc = lambda: sys.stderr.write("Shutting down....\n") # fetch yaml tests anywhere that's not GitHub Actions if "GITHUB_ACTION" not in environ: diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 3df51645..e72a2358 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -35,6 +35,7 @@ import aiohttp import pytest +from _pytest.mark.structures import MarkDecorator from mock import patch from multidict import CIMultiDict from pytest import raises @@ -45,15 +46,15 @@ from opensearchpy.exceptions import ConnectionError, TransportError from test_opensearchpy.TestHttpServer import TestHTTPServer 
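The hunk below introduces the annotation pattern that recurs throughout these async test modules: the module-level `pytestmark` keeps its `pytest.mark.asyncio` value but gains an explicit `MarkDecorator` type. A minimal sketch of the pattern in isolation (the test name is hypothetical; running it assumes `pytest-asyncio` is installed):

```python
import pytest
from _pytest.mark.structures import MarkDecorator

# pytest.mark.<name> returns a MarkDecorator, so annotating the module
# attribute makes the assignment explicit for mypy without changing behavior.
pytestmark: MarkDecorator = pytest.mark.asyncio


async def test_example() -> None:  # hypothetical test
    assert 1 + 1 == 2
```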
-pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAIOHttpConnection: async def _get_mock_connection( self, connection_params={}, - response_code=200, - response_body=b"{}", + response_code: int = 200, + response_body: bytes = b"{}", response_headers={}, ): con = AIOHttpConnection(**connection_params) @@ -79,7 +80,7 @@ async def text(self): con.session.request = _dummy_request return con - async def test_ssl_context(self): + async def test_ssl_context(self) -> None: try: context = ssl.create_default_context() except AttributeError: @@ -95,11 +96,11 @@ async def test_ssl_context(self): assert con.use_ssl assert con.session.connector._ssl == context - async def test_opaque_id(self): + async def test_opaque_id(self) -> None: con = AIOHttpConnection(opaque_id="app-1") assert con.headers["x-opaque-id"] == "app-1" - async def test_no_http_compression(self): + async def test_no_http_compression(self) -> None: con = await self._get_mock_connection() assert not con.http_compress assert "accept-encoding" not in con.headers @@ -112,7 +113,7 @@ async def test_no_http_compression(self): assert "accept-encoding" not in kwargs["headers"] assert "content-encoding" not in kwargs["headers"] - async def test_http_compression(self): + async def test_http_compression(self) -> None: con = await self._get_mock_connection({"http_compress": True}) assert con.http_compress assert con.headers["accept-encoding"] == "gzip,deflate" @@ -138,7 +139,7 @@ async def test_http_compression(self): assert kwargs["headers"]["accept-encoding"] == "gzip,deflate" assert "content-encoding" not in kwargs["headers"] - async def test_url_prefix(self): + async def test_url_prefix(self) -> None: con = await self._get_mock_connection( connection_params={"url_prefix": "/_search/"} ) @@ -150,18 +151,18 @@ async def test_url_prefix(self): method, yarl_url = con.session.request.call_args[0] assert method == "GET" and str(yarl_url) == "http://localhost:9200/_search/" - async def test_default_user_agent(self): + async def test_default_user_agent(self) -> None: con = AIOHttpConnection() assert con._get_default_user_agent() == "opensearch-py/%s (Python %s)" % ( __versionstr__, python_version(), ) - async def test_timeout_set(self): + async def test_timeout_set(self) -> None: con = AIOHttpConnection(timeout=42) assert 42 == con.timeout - async def test_keep_alive_is_on_by_default(self): + async def test_keep_alive_is_on_by_default(self) -> None: con = AIOHttpConnection() assert { "connection": "keep-alive", @@ -169,7 +170,7 @@ async def test_keep_alive_is_on_by_default(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth(self): + async def test_http_auth(self) -> None: con = AIOHttpConnection(http_auth="username:secret") assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -178,7 +179,7 @@ async def test_http_auth(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth_tuple(self): + async def test_http_auth_tuple(self) -> None: con = AIOHttpConnection(http_auth=("username", "secret")) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -187,7 +188,7 @@ async def test_http_auth_tuple(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth_list(self): + async def test_http_auth_list(self) -> None: con = AIOHttpConnection(http_auth=["username", "secret"]) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -196,7 +197,7 @@ async def test_http_auth_list(self): 
"user-agent": con._get_default_user_agent(), } == con.headers - async def test_uses_https_if_verify_certs_is_off(self): + async def test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection(use_ssl=True, verify_certs=False) assert 1 == len(w) @@ -209,7 +210,7 @@ async def test_uses_https_if_verify_certs_is_off(self): assert con.scheme == "https" assert con.host == "https://localhost:9200" - async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self): + async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -219,17 +220,17 @@ async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self): assert isinstance(con.session, aiohttp.ClientSession) - async def test_doesnt_use_https_if_not_specified(self): + async def test_doesnt_use_https_if_not_specified(self) -> None: con = AIOHttpConnection() assert not con.use_ssl - async def test_no_warning_when_using_ssl_context(self): + async def test_no_warning_when_using_ssl_context(self) -> None: ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: AIOHttpConnection(ssl_context=ctx) assert w == [], str([x.message for x in w]) - async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): + async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, @@ -252,32 +253,32 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_given_ca_certs(self, load_verify_locations, tmp_path): + async def test_uses_given_ca_certs(self, load_verify_locations, tmp_path) -> None: path = tmp_path / "ca_certs.pem" path.touch() AIOHttpConnection(use_ssl=True, ca_certs=str(path)) load_verify_locations.assert_called_once_with(cafile=str(path)) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_default_ca_certs(self, load_verify_locations): + async def test_uses_default_ca_certs(self, load_verify_locations) -> None: AIOHttpConnection(use_ssl=True) load_verify_locations.assert_called_once_with( cafile=Connection.default_ca_certs() ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_no_ca_certs(self, load_verify_locations): + async def test_uses_no_ca_certs(self, load_verify_locations) -> None: AIOHttpConnection(use_ssl=True, verify_certs=False) load_verify_locations.assert_not_called() - async def test_trust_env(self): + async def test_trust_env(self) -> None: con = AIOHttpConnection(trust_env=True) await con._create_aiohttp_session() assert con._trust_env is True assert con.session.trust_env is True - async def test_trust_env_default_value_is_false(self): + async def test_trust_env_default_value_is_false(self) -> None: con = AIOHttpConnection() await con._create_aiohttp_session() @@ -285,7 +286,7 @@ async def test_trust_env_default_value_is_false(self): assert con.session.trust_env is False @patch("opensearchpy.connection.base.logger") - async def test_uncompressed_body_logged(self, logger): + async def test_uncompressed_body_logged(self, logger) -> None: con = await self._get_mock_connection(connection_params={"http_compress": True}) await con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -295,14 +296,14 @@ async def test_uncompressed_body_logged(self, logger): 
assert '> {"example": "body"}' == req[0][0] % req[0][1:] assert "< {}" == resp[0][0] % resp[0][1:] - async def test_surrogatepass_into_bytes(self): + async def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) status, headers, data = await con.perform_request("GET", "/") assert u"你好\uda6a" == data # fmt: skip @pytest.mark.parametrize("exception_cls", reraise_exceptions) - async def test_recursion_error_reraised(self, exception_cls): + async def test_recursion_error_reraised(self, exception_cls) -> None: conn = AIOHttpConnection() def request_raise(*_, **__): @@ -315,7 +316,7 @@ def request_raise(*_, **__): await conn.perform_request("GET", "/") assert str(e.value) == "Wasn't modified!" - async def test_json_errors_are_parsed(self): + async def test_json_errors_are_parsed(self) -> None: con = await self._get_mock_connection( response_code=400, response_body=b'{"error": {"type": "snapshot_in_progress_exception"}}', @@ -334,13 +335,13 @@ class TestConnectionHttpServer: """Tests the HTTP connection implementations against a live server E2E""" @classmethod - def setup_class(cls): + def setup_class(cls) -> None: # Start server cls.server = TestHTTPServer(port=8081) cls.server.start() @classmethod - def teardown_class(cls): + def teardown_class(cls) -> None: # Stop server cls.server.stop() @@ -349,7 +350,7 @@ async def httpserver(self, conn, **kwargs): data = json.loads(data) return (status, data) - async def test_aiohttp_connection(self): + async def test_aiohttp_connection(self) -> None: # Defaults conn = AIOHttpConnection("localhost", port=8081, use_ssl=False) user_agent = conn._get_default_user_agent() @@ -409,13 +410,13 @@ async def test_aiohttp_connection(self): "User-Agent": user_agent, } - async def test_aiohttp_connection_error(self): + async def test_aiohttp_connection_error(self) -> None: conn = AIOHttpConnection("not.a.host.name") with pytest.raises(ConnectionError): await conn.perform_request("GET", "/") -async def test_default_connection_is_returned_by_default(): +async def test_default_connection_is_returned_by_default() -> None: c = async_connections.AsyncConnections() con, con2 = object(), object() @@ -426,7 +427,7 @@ async def test_default_connection_is_returned_by_default(): assert await c.get_connection() is con -async def test_get_connection_created_connection_if_needed(): +async def test_get_connection_created_connection_if_needed() -> None: c = async_connections.AsyncConnections() await c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -439,7 +440,7 @@ async def test_get_connection_created_connection_if_needed(): assert [{"host": "localhost"}] == local.transport.hosts -async def test_configure_preserves_unchanged_connections(): +async def test_configure_preserves_unchanged_connections() -> None: c = async_connections.AsyncConnections() await c.configure( @@ -458,7 +459,7 @@ async def test_configure_preserves_unchanged_connections(): assert new_default is not default -async def test_remove_connection_removes_both_conn_and_conf(): +async def test_remove_connection_removes_both_conn_and_conf() -> None: c = async_connections.AsyncConnections() await c.configure( @@ -475,7 +476,7 @@ async def test_remove_connection_removes_both_conn_and_conf(): await c.get_connection("default") -async def test_create_connection_constructs_client(): +async def test_create_connection_constructs_client() -> None: c = async_connections.AsyncConnections() await 
c.create_connection("testing", hosts=["opensearch.com"]) @@ -483,7 +484,7 @@ async def test_create_connection_constructs_client(): assert [{"host": "opensearch.com"}] == con.transport.hosts -async def test_create_connection_adds_our_serializer(): +async def test_create_connection_adds_our_serializer() -> None: c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) result = await c.get_connection("testing") diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py index ca0c8d4c..f24b8a48 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -10,12 +10,13 @@ import pytest +from _pytest.mark.structures import MarkDecorator from mock import Mock from pytest import fixture from opensearchpy.connection.async_connections import add_connection, async_connections -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio @fixture diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index 26d49bf0..d13c7272 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -17,6 +17,7 @@ from hashlib import sha256 import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import InnerDoc, MetaField, Range, analyzer @@ -26,7 +27,7 @@ from opensearchpy.exceptions import IllegalOperation, ValidationException from opensearchpy.helpers import field, utils -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class MyInner(InnerDoc): @@ -118,7 +119,7 @@ class Index: name = "test-host" -async def test_range_serializes_properly(): +async def test_range_serializes_properly() -> None: class D(document.AsyncDocument): lr = field.LongRange() @@ -131,7 +132,7 @@ class D(document.AsyncDocument): assert {"lr": {"lt": 42}} == d.to_dict() -async def test_range_deserializes_properly(): +async def test_range_deserializes_properly() -> None: class D(InnerDoc): lr = field.LongRange() @@ -141,13 +142,13 @@ class D(InnerDoc): assert 47 not in d.lr -async def test_resolve_nested(): +async def test_resolve_nested() -> None: nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] -async def test_conflicting_mapping_raises_error_in_index_to_dict(): +async def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: class A(document.AsyncDocument): name = field.Text() @@ -162,18 +163,18 @@ class B(document.AsyncDocument): i.to_dict() -async def test_ip_address_serializes_properly(): +async def test_ip_address_serializes_properly() -> None: host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() -async def test_matches_uses_index(): +async def test_matches_uses_index() -> None: assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) -async def test_matches_with_no_name_always_matches(): +async def test_matches_with_no_name_always_matches() -> None: class D(document.AsyncDocument): pass @@ -181,7 +182,7 @@ class D(document.AsyncDocument): assert D._matches({"_index": "whatever"}) -async def test_matches_accepts_wildcards(): +async def test_matches_accepts_wildcards() -> None: class 
MyDoc(document.AsyncDocument): class Index: name = "my-*" @@ -190,7 +191,7 @@ class Index: assert not MyDoc._matches({"_index": "not-my-index"}) -async def test_assigning_attrlist_to_field(): +async def test_assigning_attrlist_to_field() -> None: sc = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -198,13 +199,13 @@ async def test_assigning_attrlist_to_field(): assert sc.to_dict()["files"] is ls -async def test_optional_inner_objects_are_not_validated_if_missing(): +async def test_optional_inner_objects_are_not_validated_if_missing() -> None: d = OptionalObjectWithRequiredField() assert d.full_clean() is None -async def test_custom_field(): +async def test_custom_field() -> None: s = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s.to_dict() @@ -215,13 +216,13 @@ async def test_custom_field(): assert isinstance(s.title, Secret) -async def test_custom_field_mapping(): +async def test_custom_field_mapping() -> None: assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() -async def test_custom_field_in_nested(): +async def test_custom_field_in_nested() -> None: s = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -229,7 +230,7 @@ async def test_custom_field_in_nested(): assert s.secrets[0].title == "Hello" -async def test_multi_works_after_doc_has_been_saved(): +async def test_multi_works_after_doc_has_been_saved() -> None: c = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -237,7 +238,7 @@ async def test_multi_works_after_doc_has_been_saved(): assert c.to_dict() == {"files": ["setup.py"]} -async def test_multi_works_in_nested_after_doc_has_been_serialized(): +async def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: # Issue #359 c = DocWithNested(comments=[Comment(title="First!")]) @@ -246,7 +247,7 @@ async def test_multi_works_in_nested_after_doc_has_been_serialized(): assert [] == c.comments[0].tags -async def test_null_value_for_object(): +async def test_null_value_for_object() -> None: d = MyDoc(inner=None) assert d.inner is None @@ -302,21 +303,21 @@ async def test_to_dict_with_meta_includes_custom_index(): assert {"_index": "other-index", "_source": {"title": "hello"}} == d.to_dict(True) -async def test_to_dict_without_skip_empty_will_include_empty_fields(): +async def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: d = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() assert {"tags": [], "title": None, "inner": {}} == d.to_dict(skip_empty=False) -async def test_attribute_can_be_removed(): +async def test_attribute_can_be_removed() -> None: d = MyDoc(title="hello") del d.title assert "title" not in d._d_ -async def test_doc_type_can_be_correctly_pickled(): +async def test_doc_type_can_be_correctly_pickled() -> None: d = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -331,7 +332,7 @@ async def test_doc_type_can_be_correctly_pickled(): assert isinstance(d2.comments[0], Comment) -async def test_meta_is_accessible_even_on_empty_doc(): +async def test_meta_is_accessible_even_on_empty_doc() -> None: d = MyDoc() d.meta @@ -358,7 +359,7 @@ class Meta: } == User._doc_type.mapping.to_dict() -async def test_multi_value_fields(): +async def test_multi_value_fields() -> None: class Blog(document.AsyncDocument): tags = field.Keyword(multi=True) @@ -369,7 +370,7 @@ class Blog(document.AsyncDocument): assert ["search", "python"] == b.tags -async def 
test_docs_with_properties(): +async def test_docs_with_properties() -> None: class User(document.AsyncDocument): pwd_hash = field.Text() @@ -397,7 +398,7 @@ def password(self, pwd): u.password -async def test_nested_can_be_assigned_to(): +async def test_nested_can_be_assigned_to() -> None: d1 = DocWithNested(comments=[Comment(title="First!")]) d2 = DocWithNested() @@ -408,13 +409,13 @@ async def test_nested_can_be_assigned_to(): assert isinstance(d2.comments[0], Comment) -async def test_nested_can_be_none(): +async def test_nested_can_be_none() -> None: d = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() -async def test_nested_defaults_to_list_and_can_be_updated(): +async def test_nested_defaults_to_list_and_can_be_updated() -> None: md = DocWithNested() assert [] == md.comments @@ -435,7 +436,7 @@ async def test_to_dict_is_recursive_and_can_cope_with_multi_values(): } == md.to_dict() -async def test_to_dict_ignores_empty_collections(): +async def test_to_dict_ignores_empty_collections() -> None: md = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() @@ -489,7 +490,7 @@ async def test_document_can_be_created_dynamically(): } == md.to_dict() -async def test_invalid_date_will_raise_exception(): +async def test_invalid_date_will_raise_exception() -> None: md = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -528,7 +529,7 @@ class B(A): } == B._doc_type.mapping.to_dict() -async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict(): +async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: md = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -555,32 +556,32 @@ async def test_index_inheritance(): } == MyMultiSubDoc._doc_type.mapping.to_dict() -async def test_meta_fields_can_be_set_directly_in_init(): +async def test_meta_fields_can_be_set_directly_in_init() -> None: p = object() md = MyDoc(_id=p, title="Hello World!") assert md.meta.id is p -async def test_save_no_index(mock_client): +async def test_save_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): await md.save(using="mock") -async def test_delete_no_index(mock_client): +async def test_delete_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): await md.delete(using="mock") -async def test_update_no_fields(): +async def test_update_no_fields() -> None: md = MyDoc() with raises(IllegalOperation): await md.update() -async def test_search_with_custom_alias_and_index(mock_client): +async def test_search_with_custom_alias_and_index(mock_client) -> None: search_object = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py index 34e18008..58c936c0 100644 --- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py @@ -11,11 +11,12 @@ from datetime import datetime import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.faceted_search import AsyncFacetedSearch from opensearchpy.helpers.faceted_search import DateHistogramFacet, TermsFacet -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class BlogSearch(AsyncFacetedSearch): @@ -31,7 +32,7 @@ class 
BlogSearch(AsyncFacetedSearch): } -async def test_query_is_created_properly(): +async def test_query_is_created_properly() -> None: bs = BlogSearch("python search") s = bs.build_search() @@ -135,7 +136,7 @@ async def test_filters_are_applied_to_search_ant_relevant_facets(): } == d -async def test_date_histogram_facet_with_1970_01_01_date(): +async def test_date_histogram_facet_with_1970_01_01_date() -> None: dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -168,7 +169,7 @@ async def test_date_histogram_facet_with_1970_01_01_date(): ("fixed_interval", "1h"), ], ) -async def test_date_histogram_interval_types(interval_type, interval): +async def test_date_histogram_interval_types(interval_type, interval) -> None: dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -180,7 +181,7 @@ async def test_date_histogram_interval_types(interval_type, interval): dhf.get_value_filter(datetime.now()) -async def test_date_histogram_no_interval_keyerror(): +async def test_date_histogram_no_interval_keyerror() -> None: dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index 1958f80f..681b9cfe 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -12,13 +12,14 @@ from random import choice import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import Date, Text, analyzer from opensearchpy._async.helpers.document import AsyncDocument from opensearchpy._async.helpers.index import AsyncIndex -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Post(AsyncDocument): @@ -26,7 +27,7 @@ class Post(AsyncDocument): published_from = Date() -async def test_multiple_doc_types_will_combine_mappings(): +async def test_multiple_doc_types_will_combine_mappings() -> None: class User(AsyncDocument): username = Text() @@ -44,14 +45,14 @@ class User(AsyncDocument): } == i.to_dict() -async def test_search_is_limited_to_index_name(): +async def test_search_is_limited_to_index_name() -> None: i = AsyncIndex("my-index") s = i.search() assert s._index == ["my-index"] -async def test_cloned_index_has_copied_settings_and_using(): +async def test_cloned_index_has_copied_settings_and_using() -> None: client = object() i = AsyncIndex("my-index", using=client) i.settings(number_of_shards=1) @@ -64,7 +65,7 @@ async def test_cloned_index_has_copied_settings_and_using(): assert i._settings is not i2._settings -async def test_cloned_index_has_analysis_attribute(): +async def test_cloned_index_has_analysis_attribute() -> None: """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. 
@@ -84,7 +85,7 @@ async def test_cloned_index_has_analysis_attribute(): assert i.to_dict()["settings"]["analysis"] == i2.to_dict()["settings"]["analysis"] -async def test_settings_are_saved(): +async def test_settings_are_saved() -> None: i = AsyncIndex("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -92,7 +93,7 @@ async def test_settings_are_saved(): assert {"settings": {"number_of_shards": 1, "number_of_replicas": 0}} == i.to_dict() -async def test_registered_doc_type_included_in_to_dict(): +async def test_registered_doc_type_included_in_to_dict() -> None: i = AsyncIndex("i", using="alias") i.document(Post) @@ -106,7 +107,7 @@ async def test_registered_doc_type_included_in_to_dict(): } == i.to_dict() -async def test_registered_doc_type_included_in_search(): +async def test_registered_doc_type_included_in_search() -> None: i = AsyncIndex("i", using="alias") i.document(Post) @@ -115,7 +116,7 @@ async def test_registered_doc_type_included_in_search(): assert s._doc_type == [Post] -async def test_aliases_add_to_object(): +async def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -125,7 +126,7 @@ async def test_aliases_add_to_object(): assert index._aliases == alias_dict -async def test_aliases_returned_from_to_dict(): +async def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -164,7 +165,7 @@ async def test_analyzers_returned_from_to_dict(): ] == {"filter": ["standard"], "type": "custom", "tokenizer": "standard"} -async def test_conflicting_analyzer_raises_error(): +async def test_conflicting_analyzer_raises_error() -> None: i = AsyncIndex("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py index 7c9e799f..6ae4c0b7 100644 --- a/test_opensearchpy/test_async/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py @@ -15,7 +15,7 @@ from opensearchpy.helpers import analysis -async def test_mapping_can_has_fields(): +async def test_mapping_can_has_fields() -> None: m = mapping.AsyncMapping() m.field("name", "text").field("tags", "keyword") @@ -57,7 +57,7 @@ async def test_mapping_update_is_recursive(): } == m1.to_dict() -async def test_properties_can_iterate_over_all_the_fields(): +async def test_properties_can_iterate_over_all_the_fields() -> None: m = mapping.AsyncMapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -186,7 +186,7 @@ async def test_mapping_can_collect_multiple_analyzers(): } == m._collect_analysis() -async def test_even_non_custom_analyzers_can_have_params(): +async def test_even_non_custom_analyzers_can_have_params() -> None: a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.AsyncMapping() m.field("title", "text", analyzer=a1) @@ -196,14 +196,14 @@ async def test_even_non_custom_analyzers_can_have_params(): } == m._collect_analysis() -async def test_resolve_field_can_resolve_multifields(): +async def test_resolve_field_can_resolve_multifields() -> None: m = mapping.AsyncMapping() m.field("title", "text", fields={"keyword": Keyword()}) assert isinstance(m.resolve_field("title.keyword"), Keyword) -async def 
test_resolve_nested(): +async def test_resolve_nested() -> None: m = mapping.AsyncMapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py index 784193ee..c32a8c7c 100644 --- a/test_opensearchpy/test_async/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_search.py @@ -11,6 +11,7 @@ from copy import deepcopy import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy._async.helpers import search @@ -19,16 +20,16 @@ from opensearchpy.helpers import query from opensearchpy.helpers.query import Q -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_expand__to_dot_is_respected(): +async def test_expand__to_dot_is_respected() -> None: s = search.AsyncSearch().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() -async def test_execute_uses_cache(): +async def test_execute_uses_cache() -> None: s = search.AsyncSearch() r = object() s._response = r @@ -36,20 +37,20 @@ async def test_execute_uses_cache(): assert r is await s.execute() -async def test_cache_isnt_cloned(): +async def test_cache_isnt_cloned() -> None: s = search.AsyncSearch() s._response = object() assert not hasattr(s._clone(), "_response") -async def test_search_starts_with_no_query(): +async def test_search_starts_with_no_query() -> None: s = search.AsyncSearch() assert s.query._proxied is None -async def test_search_query_combines_query(): +async def test_search_query_combines_query() -> None: s = search.AsyncSearch() s2 = s.query("match", f=42) @@ -61,7 +62,7 @@ async def test_search_query_combines_query(): assert s3.query._proxied == query.Bool(must=[query.Match(f=42), query.Match(f=43)]) -async def test_query_can_be_assigned_to(): +async def test_query_can_be_assigned_to() -> None: s = search.AsyncSearch() q = Q("match", title="python") @@ -85,7 +86,7 @@ async def test_query_can_be_wrapped(): } == s.to_dict() -async def test_using(): +async def test_using() -> None: o = object() o2 = object() s = search.AsyncSearch(using=o) @@ -95,19 +96,19 @@ async def test_using(): assert s2._using is o2 -async def test_methods_are_proxied_to_the_query(): +async def test_methods_are_proxied_to_the_query() -> None: s = search.AsyncSearch().query("match_all") assert s.query.to_dict() == {"match_all": {}} -async def test_query_always_returns_search(): +async def test_query_always_returns_search() -> None: s = search.AsyncSearch() assert isinstance(s.query("match", f=42), search.AsyncSearch) -async def test_source_copied_on_clone(): +async def test_source_copied_on_clone() -> None: s = search.AsyncSearch().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -121,7 +122,7 @@ async def test_source_copied_on_clone(): assert s3._clone()._source == ["some", "fields"] -async def test_copy_clones(): +async def test_copy_clones() -> None: from copy import copy s1 = search.AsyncSearch().source(["some", "fields"]) @@ -131,7 +132,7 @@ async def test_copy_clones(): assert s1 is not s2 -async def test_aggs_allow_two_metric(): +async def test_aggs_allow_two_metric() -> None: s = search.AsyncSearch() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -173,7 +174,7 @@ async def test_aggs_get_copied_on_change(): assert d == s4.to_dict() -async def 
test_search_index(): +async def test_search_index() -> None: s = search.AsyncSearch(index="i") assert s._index == ["i"] s = s.index("i2") @@ -204,7 +205,7 @@ async def test_search_index(): assert s2._index == ["i", "i2", "i3", "i4", "i5"] -async def test_doc_type_document_class(): +async def test_doc_type_document_class() -> None: class MyDocument(AsyncDocument): pass @@ -229,7 +230,7 @@ async def test_sort(): assert search.AsyncSearch().to_dict() == s.to_dict() -async def test_sort_by_score(): +async def test_sort_by_score() -> None: s = search.AsyncSearch() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -239,7 +240,7 @@ async def test_sort_by_score(): s.sort("-_score") -async def test_slice(): +async def test_slice() -> None: s = search.AsyncSearch() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -248,7 +249,7 @@ async def test_slice(): assert {"from": 20, "size": 0} == s[20:0].to_dict() -async def test_index(): +async def test_index() -> None: s = search.AsyncSearch() assert {"from": 3, "size": 1} == s[3].to_dict() @@ -383,13 +384,13 @@ async def test_reverse(): assert d == s.to_dict() -async def test_from_dict_doesnt_need_query(): +async def test_from_dict_doesnt_need_query() -> None: s = search.AsyncSearch.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() -async def test_source(): +async def test_source() -> None: assert {} == search.AsyncSearch().source().to_dict() assert { @@ -420,7 +421,7 @@ async def test_source_on_clone(): } == search.AsyncSearch().source(False).filter("term", title="python").to_dict() -async def test_source_on_clear(): +async def test_source_on_clear() -> None: assert ( {} == search.AsyncSearch() @@ -461,7 +462,7 @@ async def test_suggest(): } == s.to_dict() -async def test_exclude(): +async def test_exclude() -> None: s = search.AsyncSearch() s = s.exclude("match", title="python") diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py index 340bd1b7..b15983dc 100644 --- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py @@ -11,15 +11,16 @@ from copy import deepcopy import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import Q from opensearchpy._async.helpers import update_by_query from opensearchpy.helpers.response import UpdateByQueryResponse -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_ubq_starts_with_no_query(): +async def test_ubq_starts_with_no_query() -> None: ubq = update_by_query.AsyncUpdateByQuery() assert ubq.query._proxied is None @@ -81,7 +82,7 @@ async def test_complex_example(): } == ubq.to_dict() -async def test_exclude(): +async def test_exclude() -> None: ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -130,7 +131,7 @@ async def test_reverse(): assert d == ubq.to_dict() -async def test_from_dict_doesnt_need_query(): +async def test_from_dict_doesnt_need_query() -> None: ubq = update_by_query.AsyncUpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() @@ -152,7 +153,7 @@ async def test_overwrite_script(): assert {"script": {"source": "ctx._source.likes++"}} == ubq.to_dict() -async def test_update_by_query_response_success(): +async def test_update_by_query_response_success() -> None: ubqr = 
UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index 282a61c7..913a944d 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -28,29 +28,30 @@ import mock import pytest +from _pytest.mark.structures import MarkDecorator from multidict import CIMultiDict from opensearchpy._async._extra_imports import aiohttp from opensearchpy._async.compat import get_running_loop from opensearchpy.connection.http_async import AsyncHttpConnection -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAsyncHttpConnection: - def test_auth_as_tuple(self): + def test_auth_as_tuple(self) -> None: c = AsyncHttpConnection(http_auth=("username", "password")) assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" - def test_auth_as_string(self): + def test_auth_as_string(self) -> None: c = AsyncHttpConnection(http_auth="username:password") assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" - def test_auth_as_callable(self): + def test_auth_as_callable(self) -> None: def auth_fn(): pass @@ -58,7 +59,7 @@ def auth_fn(): assert callable(c._http_auth) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_basicauth_in_request_session(self, mock_request): + async def test_basicauth_in_request_session(self, mock_request) -> None: async def do_request(*args, **kwargs): response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() @@ -89,7 +90,7 @@ async def do_request(*args, **kwargs): ) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_callable_in_request_session(self, mock_request): + async def test_callable_in_request_session(self, mock_request) -> None: def auth_fn(*args, **kwargs): return { "Test": "PASSED", diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index c620873c..2364f0fa 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -14,7 +14,7 @@ class TestPluginsClient(TestCase): - async def test_plugins_client(self): + async def test_plugins_client(self) -> None: with self.assertWarns(Warning) as w: client = AsyncOpenSearch() client.plugins.__init__(client) # double-init diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index 794aeb53..36571a71 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -35,13 +35,13 @@ class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): - async def asyncSetUp(self): + async def asyncSetUp(self) -> None: self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self): + async def asyncTearDown(self) -> None: wipe_cluster(self.client) if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py index 2c49aca3..908313ee 100644 --- a/test_opensearchpy/test_async/test_server/conftest.py +++ 
b/test_opensearchpy/test_async/test_server/conftest.py @@ -29,13 +29,14 @@ import asyncio import pytest +from _pytest.mark.structures import MarkDecorator import opensearchpy from opensearchpy.helpers.test import OPENSEARCH_URL from ...utils import wipe_cluster -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio @pytest.fixture(scope="function") diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index 17104312..41a07012 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -29,24 +29,25 @@ from __future__ import unicode_literals import pytest +from _pytest.mark.structures import MarkDecorator -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestUnicode: - async def test_indices_analyze(self, async_client): + async def test_indices_analyze(self, async_client) -> None: await async_client.indices.analyze(body='{"text": "привет"}') class TestBulk: - async def test_bulk_works_with_string_body(self, async_client): + async def test_bulk_works_with_string_body(self, async_client) -> None: docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) assert response["errors"] is False assert len(response["items"]) == 1 - async def test_bulk_works_with_bytestring_body(self, async_client): + async def test_bulk_works_with_bytestring_body(self, async_client) -> None: docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -57,7 +58,7 @@ async def test_bulk_works_with_bytestring_body(self, async_client): class TestYarlMissing: async def test_aiohttp_connection_works_without_yarl( self, async_client, monkeypatch - ): + ) -> None: # This is a defensive test case for if aiohttp suddenly stops using yarl. 
from opensearchpy._async import http_aiohttp diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index e6d79c46..36ea7a10 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -96,7 +96,7 @@ async def pull_request(write_client): @fixture -async def setup_ubq_tests(client): +async def setup_ubq_tests(client) -> str: index = "test-git" await create_git_index(client, index) await async_bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 425eb2c7..dee69819 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -27,6 +27,7 @@ import asyncio +from typing import Tuple import pytest from mock import MagicMock, patch @@ -48,8 +49,11 @@ def __await__(self): class FailingBulkClient(object): def __init__( - self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {}) - ): + self, + client, + fail_at: Tuple[int] = (2,), + fail_with=TransportError(599, "Error!", {}), + ) -> None: self.client = client self._called = 0 self._fail_at = fail_at @@ -64,7 +68,7 @@ async def bulk(self, *args, **kwargs): class TestStreamingBulk(object): - async def test_actions_remain_unchanged(self, async_client): + async def test_actions_remain_unchanged(self, async_client) -> None: actions1 = [{"_id": 1}, {"_id": 2}] async for ok, item in actions.async_streaming_bulk( async_client, actions1, index="test-index" @@ -72,7 +76,7 @@ async def test_actions_remain_unchanged(self, async_client): assert ok assert [{"_id": 1}, {"_id": 2}] == actions1 - async def test_all_documents_get_inserted(self, async_client): + async def test_all_documents_get_inserted(self, async_client) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] async for ok, item in actions.async_streaming_bulk( async_client, docs, index="test-index", refresh=True @@ -118,7 +122,9 @@ def sync_gen(): "_source" ] - async def test_all_errors_from_chunk_are_raised_on_failure(self, async_client): + async def test_all_errors_from_chunk_are_raised_on_failure( + self, async_client + ) -> None: await async_client.indices.create( "i", { @@ -187,7 +193,7 @@ async def test_transport_error_can_becaught(self, async_client): } } == results[1][1] - async def test_rejected_documents_are_retried(self, async_client): + async def test_rejected_documents_are_retried(self, async_client) -> None: failing_client = FailingBulkClient( async_client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -217,7 +223,7 @@ async def test_rejected_documents_are_retried(self, async_client): async def test_rejected_documents_are_retried_at_most_max_retries_times( self, async_client - ): + ) -> None: failing_client = FailingBulkClient( async_client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -246,7 +252,9 @@ async def test_rejected_documents_are_retried_at_most_max_retries_times( assert {"value": 2, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called - async def test_transport_error_is_raised_with_max_retries(self, async_client): + async def test_transport_error_is_raised_with_max_retries( + self, async_client + ) -> None: failing_client = FailingBulkClient( 
async_client, fail_at=(1, 2, 3, 4), @@ -272,7 +280,7 @@ async def streaming_bulk(): class TestBulk(object): - async def test_bulk_works_with_single_item(self, async_client): + async def test_bulk_works_with_single_item(self, async_client) -> None: docs = [{"answer": 42, "_id": 1}] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -285,7 +293,7 @@ async def test_bulk_works_with_single_item(self, async_client): "_source" ] - async def test_all_documents_get_inserted(self, async_client): + async def test_all_documents_get_inserted(self, async_client) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -298,7 +306,7 @@ async def test_all_documents_get_inserted(self, async_client): "_source" ] - async def test_stats_only_reports_numbers(self, async_client): + async def test_stats_only_reports_numbers(self, async_client) -> None: docs = [{"answer": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True, stats_only=True @@ -402,7 +410,7 @@ async def test_errors_are_collected_properly(self, async_client): class MockScroll: - def __init__(self): + def __init__(self) -> None: self.calls = [] async def __call__(self, *args, **kwargs): @@ -424,7 +432,7 @@ async def __call__(self, *args, **kwargs): class MockResponse: - def __init__(self, resp): + def __init__(self, resp) -> None: self.resp = resp async def __call__(self, *args, **kwargs): @@ -564,7 +572,7 @@ async def test_initial_search_error(self, async_client, scan_teardown): assert data == [{"search_data": 1}] assert mock_scroll.calls == [] - async def test_no_scroll_id_fast_route(self, async_client, scan_teardown): + async def test_no_scroll_id_fast_route(self, async_client, scan_teardown) -> None: with patch.object(async_client, "search", MockResponse({"no": "_scroll_id"})): with patch.object(async_client, "scroll") as scroll_mock: with patch.object(async_client, "clear_scroll") as clear_mock: @@ -776,7 +784,7 @@ async def reindex_setup(async_client): class TestReindex(object): async def test_reindex_passes_kwargs_to_scan_and_bulk( self, async_client, reindex_setup - ): + ) -> None: await actions.async_reindex( async_client, "test_index", @@ -795,7 +803,7 @@ async def test_reindex_passes_kwargs_to_scan_and_bulk( await async_client.get(index="prod_index", id=42) )["_source"] - async def test_reindex_accepts_a_query(self, async_client, reindex_setup): + async def test_reindex_accepts_a_query(self, async_client, reindex_setup) -> None: await actions.async_reindex( async_client, "test_index", @@ -814,7 +822,7 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup): await async_client.get(index="prod_index", id=42) )["_source"] - async def test_all_documents_get_moved(self, async_client, reindex_setup): + async def test_all_documents_get_moved(self, async_client, reindex_setup) -> None: await actions.async_reindex(async_client, "test_index", "prod_index") await async_client.indices.refresh() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index bc2df5ba..99f2486d 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -10,6 +10,8 @@ from __future__ import unicode_literals +from typing import Any, Dict + async 
def create_flat_git_index(client, index): # we will use user on several places @@ -1076,7 +1078,7 @@ async def create_git_index(client, index): ] -def flatten_doc(d): +def flatten_doc(d) -> Dict[str, Any]: src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1085,7 +1087,7 @@ def flatten_doc(d): FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d] -def create_test_git_data(d): +def create_test_git_data(d) -> Dict[str, Any]: src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 650c7b39..67982918 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -146,7 +146,7 @@ async def test_serialization(write_client): } -async def test_nested_inner_hits_are_wrapped_properly(pull_request): +async def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None: history_query = Q( "nested", path="comments.history", @@ -174,7 +174,7 @@ async def test_nested_inner_hits_are_wrapped_properly(pull_request): assert "score" in history.meta -async def test_nested_inner_hits_are_deserialized_properly(pull_request): +async def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None: s = PullRequest.search().query( "nested", inner_hits={}, @@ -189,7 +189,7 @@ async def test_nested_inner_hits_are_deserialized_properly(pull_request): assert isinstance(pr.comments[0].created_at, datetime) -async def test_nested_top_hits_are_wrapped_properly(pull_request): +async def test_nested_top_hits_are_wrapped_properly(pull_request) -> None: s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -201,7 +201,7 @@ async def test_nested_top_hits_are_wrapped_properly(pull_request): assert isinstance(r.aggregations.comments.hits.hits[0], Comment) -async def test_update_object_field(write_client): +async def test_update_object_field(write_client) -> None: await Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -221,7 +221,7 @@ async def test_update_object_field(write_client): assert w.ranked == {"test1": 0.1, "topic2": 0.2} -async def test_update_script(write_client): +async def test_update_script(write_client) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -231,7 +231,7 @@ async def test_update_script(write_client): assert w.views == 47 -async def test_update_retry_on_conflict(write_client): +async def test_update_retry_on_conflict(write_client) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -250,7 +250,7 @@ async def test_update_retry_on_conflict(write_client): @pytest.mark.parametrize("retry_on_conflict", [None, 0]) -async def test_update_conflicting_version(write_client, retry_on_conflict): +async def test_update_conflicting_version(write_client, retry_on_conflict) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -267,7 +267,7 @@ async def test_update_conflicting_version(write_client, retry_on_conflict): ) -async def test_save_and_update_return_doc_meta(write_client): +async def test_save_and_update_return_doc_meta(write_client) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), 
_id="opensearch-py", views=42) resp = await w.save(return_doc_meta=True) @@ -291,31 +291,33 @@ async def test_save_and_update_return_doc_meta(write_client): assert resp.keys().__contains__("_version") -async def test_init(write_client): +async def test_init(write_client) -> None: await Repository.init(index="test-git") assert await write_client.indices.exists(index="test-git") -async def test_get_raises_404_on_index_missing(data_client): +async def test_get_raises_404_on_index_missing(data_client) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php", index="not-there") -async def test_get_raises_404_on_non_existent_id(data_client): +async def test_get_raises_404_on_non_existent_id(data_client) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php") -async def test_get_returns_none_if_404_ignored(data_client): +async def test_get_returns_none_if_404_ignored(data_client) -> None: assert None is await Repository.get("opensearch-dsl-php", ignore=404) -async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client): +async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( + data_client, +) -> None: assert None is await Repository.get("42", index="not-there", ignore=404) -async def test_get(data_client): +async def test_get(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -323,15 +325,15 @@ async def test_get(data_client): assert datetime(2014, 3, 3) == opensearch_repo.created_at -async def test_exists_return_true(data_client): +async def test_exists_return_true(data_client) -> None: assert await Repository.exists("opensearch-py") -async def test_exists_false(data_client): +async def test_exists_false(data_client) -> None: assert not await Repository.exists("opensearch-dsl-php") -async def test_get_with_tz_date(data_client): +async def test_get_with_tz_date(data_client) -> None: first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -343,7 +345,7 @@ async def test_get_with_tz_date(data_client): ) -async def test_save_with_tz_date(data_client): +async def test_save_with_tz_date(data_client) -> None: tzinfo = timezone("Europe/Prague") first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -370,7 +372,7 @@ async def test_save_with_tz_date(data_client): ] -async def test_mget(data_client): +async def test_mget(data_client) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -378,23 +380,25 @@ async def test_mget(data_client): assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_mget_raises_exception_when_missing_param_is_invalid(data_client): +async def test_mget_raises_exception_when_missing_param_is_invalid(data_client) -> None: with raises(ValueError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") -async def test_mget_raises_404_when_missing_param_is_raise(data_client): +async def test_mget_raises_404_when_missing_param_is_raise(data_client) -> None: with raises(NotFoundError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") -async def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client): +async def test_mget_ignores_missing_docs_when_missing_param_is_skip( + data_client, +) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING, 
missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_update_works_from_search_response(data_client): +async def test_update_works_from_search_response(data_client) -> None: opensearch_repo = (await Repository.search().execute())[0] await opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -405,7 +409,7 @@ async def test_update_works_from_search_response(data_client): assert "opensearch" == new_version.owner.name -async def test_update(data_client): +async def test_update(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -429,7 +433,7 @@ async def test_update(data_client): assert "primary_term" in new_version.meta -async def test_save_updates_existing_doc(data_client): +async def test_save_updates_existing_doc(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -442,7 +446,7 @@ async def test_save_updates_existing_doc(data_client): assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no -async def test_save_automatically_uses_seq_no_and_primary_term(data_client): +async def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -450,7 +454,7 @@ async def test_save_automatically_uses_seq_no_and_primary_term(data_client): await opensearch_repo.save() -async def test_delete_automatically_uses_seq_no_and_primary_term(data_client): +async def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -458,7 +462,7 @@ async def test_delete_automatically_uses_seq_no_and_primary_term(data_client): await opensearch_repo.delete() -async def assert_doc_equals(expected, actual): +async def assert_doc_equals(expected, actual) -> None: async for f in aiter(expected): assert f in actual assert actual[f] == expected[f] @@ -479,7 +483,7 @@ async def test_can_save_to_different_index(write_client): ) -async def test_save_without_skip_empty_will_include_empty_fields(write_client): +async def test_save_without_skip_empty_will_include_empty_fields(write_client) -> None: test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert await test_repo.save(index="test-document", skip_empty=False) @@ -494,7 +498,7 @@ async def test_save_without_skip_empty_will_include_empty_fields(write_client): ) -async def test_delete(write_client): +async def test_delete(write_client) -> None: await write_client.create( index="test-document", id="opensearch-py", @@ -515,11 +519,11 @@ async def test_delete(write_client): ) -async def test_search(data_client): +async def test_search(data_client) -> None: assert await Repository.search().count() == 1 -async def test_search_returns_proper_doc_classes(data_client): +async def test_search_returns_proper_doc_classes(data_client) -> None: result = await Repository.search().execute() opensearch_repo = result.hits[0] @@ -528,7 +532,7 @@ async def test_search_returns_proper_doc_classes(data_client): assert opensearch_repo.owner.name == "opensearch" -async def test_refresh_mapping(data_client): +async def test_refresh_mapping(data_client) -> None: class Commit(AsyncDocument): class Index: name = "git" @@ -542,7 +546,7 @@ class Index: assert 
isinstance(Commit._index._mapping["committed_date"], Date) -async def test_highlight_in_meta(data_client): +async def test_highlight_in_meta(data_client) -> None: commit = ( await Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index 9f2d919b..bc7abbd8 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -11,6 +11,7 @@ from datetime import datetime import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import A, Boolean, Date, Keyword from opensearchpy._async.helpers.document import AsyncDocument @@ -25,7 +26,7 @@ PullRequest, ) -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Repos(AsyncDocument): @@ -118,7 +119,7 @@ class PRSearch(AsyncFacetedSearch): return PRSearch -async def test_facet_with_custom_metric(data_client): +async def test_facet_with_custom_metric(data_client) -> None: ms = MetricSearch() r = await ms.execute() @@ -127,7 +128,7 @@ async def test_facet_with_custom_metric(data_client): assert dates[0] == 1399038439000 -async def test_nested_facet(pull_request, pr_search_cls): +async def test_nested_facet(pull_request, pr_search_cls) -> None: prs = pr_search_cls() r = await prs.execute() @@ -135,7 +136,7 @@ async def test_nested_facet(pull_request, pr_search_cls): assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments -async def test_nested_facet_with_filter(pull_request, pr_search_cls): +async def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None: prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = await prs.execute() @@ -147,7 +148,7 @@ async def test_nested_facet_with_filter(pull_request, pr_search_cls): assert not r.hits -async def test_datehistogram_facet(data_client, repo_search_cls): +async def test_datehistogram_facet(data_client, repo_search_cls) -> None: rs = repo_search_cls() r = await rs.execute() @@ -155,7 +156,7 @@ async def test_datehistogram_facet(data_client, repo_search_cls): assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created -async def test_boolean_facet(data_client, repo_search_cls): +async def test_boolean_facet(data_client, repo_search_cls) -> None: rs = repo_search_cls() r = await rs.execute() @@ -167,7 +168,7 @@ async def test_boolean_facet(data_client, repo_search_cls): async def test_empty_search_finds_everything( data_client, opensearch_version, commit_search_cls -): +) -> None: cs = commit_search_cls() r = await cs.execute() assert r.hits.total.value == 52 @@ -213,7 +214,7 @@ async def test_empty_search_finds_everything( async def test_term_filters_are_shown_as_selected_and_data_is_filtered( data_client, commit_search_cls -): +) -> None: cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = await cs.execute() @@ -259,7 +260,7 @@ async def test_term_filters_are_shown_as_selected_and_data_is_filtered( async def test_range_filters_are_shown_as_selected_and_data_is_filtered( data_client, commit_search_cls -): +) -> None: cs = commit_search_cls(filters={"deletions": "better"}) r = await cs.execute() @@ -267,7 +268,7 @@ async def test_range_filters_are_shown_as_selected_and_data_is_filtered( assert 19 == r.hits.total.value -async def test_pagination(data_client, commit_search_cls): +async def 
test_pagination(data_client, commit_search_cls) -> None: cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index cc489052..f11e6d3f 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -9,13 +9,14 @@ # GitHub history for details. import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import Date, Text from opensearchpy._async.helpers.document import AsyncDocument from opensearchpy._async.helpers.index import AsyncIndex, AsyncIndexTemplate from opensearchpy.helpers import analysis -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Post(AsyncDocument): @@ -23,7 +24,7 @@ class Post(AsyncDocument): published_from = Date() -async def test_index_template_works(write_client): +async def test_index_template_works(write_client) -> None: it = AsyncIndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -44,7 +45,7 @@ async def test_index_template_works(write_client): } == await write_client.indices.get_mapping(index="test-blog") -async def test_index_can_be_saved_even_with_settings(write_client): +async def test_index_can_be_saved_even_with_settings(write_client) -> None: i = AsyncIndex("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) await i.save() @@ -59,12 +60,12 @@ async def test_index_can_be_saved_even_with_settings(write_client): ) -async def test_index_exists(data_client): +async def test_index_exists(data_client) -> None: assert await AsyncIndex("git").exists() assert not await AsyncIndex("not-there").exists() -async def test_index_can_be_created_with_settings_and_mappings(write_client): +async def test_index_can_be_created_with_settings_and_mappings(write_client) -> None: i = AsyncIndex("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -89,7 +90,7 @@ async def test_index_can_be_created_with_settings_and_mappings(write_client): } -async def test_delete(write_client): +async def test_delete(write_client) -> None: await write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -100,7 +101,7 @@ async def test_delete(write_client): assert not await write_client.indices.exists(index="test-index") -async def test_multiple_indices_with_same_doc_type_work(write_client): +async def test_multiple_indices_with_same_doc_type_work(write_client) -> None: i1 = AsyncIndex("test-index-1", using=write_client) i2 = AsyncIndex("test-index-2", using=write_client) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index 1dca7959..6be391b3 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -9,16 +9,17 @@ # GitHub history for details. 
 import pytest
+from _pytest.mark.structures import MarkDecorator
 from pytest import raises
 
 from opensearchpy import exceptions
 from opensearchpy._async.helpers import mapping
 from opensearchpy.helpers import analysis
 
-pytestmark = pytest.mark.asyncio
+pytestmark: MarkDecorator = pytest.mark.asyncio
 
 
-async def test_mapping_saved_into_opensearch(write_client):
+async def test_mapping_saved_into_opensearch(write_client) -> None:
     m = mapping.AsyncMapping()
     m.field(
         "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword")
@@ -40,7 +41,7 @@ async def test_mapping_saved_into_opensearch(write_client):
 
 async def test_mapping_saved_into_opensearch_when_index_already_exists_closed(
     write_client,
-):
+) -> None:
     m = mapping.AsyncMapping()
     m.field(
         "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword")
@@ -65,7 +66,7 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_closed(
 
 async def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis(
     write_client,
-):
+) -> None:
     m = mapping.AsyncMapping()
     analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword")
     m.field("name", "text", analyzer=analyzer)
diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py
index 8eb202f7..2b995c54 100644
--- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py
+++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py
@@ -11,6 +11,7 @@
 from __future__ import unicode_literals
 
 import pytest
+from _pytest.mark.structures import MarkDecorator
 from pytest import raises
 
 from opensearchpy import Date, Keyword, Q, Text, TransportError
@@ -19,7 +20,7 @@
 from opensearchpy.helpers.response import aggs
 from test_opensearchpy.test_async.test_server.test_helpers.test_data import FLAT_DATA
 
-pytestmark = pytest.mark.asyncio
+pytestmark: MarkDecorator = pytest.mark.asyncio
 
 
 class Repository(AsyncDocument):
@@ -40,7 +41,7 @@ class Index:
         name = "flat-git"
 
 
-async def test_filters_aggregation_buckets_are_accessible(data_client):
+async def test_filters_aggregation_buckets_are_accessible(data_client) -> None:
     has_tests_query = Q("term", files="test_opensearchpy/test_dsl")
     s = Commit.search()[0:0]
     s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket(
@@ -61,7 +62,7 @@ async def test_filters_aggregation_buckets_are_accessible(data_client):
     )
 
 
-async def test_top_hits_are_wrapped_in_response(data_client):
+async def test_top_hits_are_wrapped_in_response(data_client) -> None:
     s = Commit.search()[0:0]
     s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric(
         "top_commits", "top_hits", size=5
@@ -77,7 +78,7 @@ async def test_top_hits_are_wrapped_in_response(data_client):
     assert isinstance(hits[0], Commit)
 
 
-async def test_inner_hits_are_wrapped_in_response(data_client):
+async def test_inner_hits_are_wrapped_in_response(data_client) -> None:
     s = AsyncSearch(index="git")[0:1].query(
         "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all")
     )
@@ -88,7 +89,7 @@
     assert repr(commit.meta.inner_hits.repo[0]).startswith("<Hit(git/opensearch-py): ")
 
 
-async def test_scan_respects_doc_types(data_client):
+async def test_scan_respects_doc_types(data_client) -> None:
     result = Repository.search().scan()
     repos = await get_result(result)
 
@@ -97,7 +98,7 @@
     assert repos[0].organization == "opensearch"
 
 
-async def test_scan_iterates_through_all_docs(data_client):
+async def test_scan_iterates_through_all_docs(data_client) -> None:
s = AsyncSearch(index="flat-git") result = s.scan() commits = await get_result(result) @@ -113,7 +114,7 @@ async def get_result(b): return a -async def test_multi_search(data_client): +async def test_multi_search(data_client) -> None: s1 = Repository.search() s2 = AsyncSearch(index="flat-git") @@ -130,7 +131,7 @@ async def test_multi_search(data_client): assert r2._search is s2 -async def test_multi_missing(data_client): +async def test_multi_missing(data_client) -> None: s1 = Repository.search() s2 = AsyncSearch(index="flat-git") s3 = AsyncSearch(index="does_not_exist") @@ -153,7 +154,7 @@ async def test_multi_missing(data_client): assert r3 is None -async def test_raw_subfield_can_be_used_in_aggs(data_client): +async def test_raw_subfield_can_be_used_in_aggs(data_client) -> None: s = AsyncSearch(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) r = await s.execute() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index 2db68326..4dcf32b3 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -9,14 +9,15 @@ # GitHub history for details. import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery from opensearchpy.helpers.search import Q -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_update_by_query_no_script(write_client, setup_ubq_tests): +async def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( @@ -35,7 +36,7 @@ async def test_update_by_query_no_script(write_client, setup_ubq_tests): assert response.success() -async def test_update_by_query_with_script(write_client, setup_ubq_tests): +async def test_update_by_query_with_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( @@ -52,7 +53,7 @@ async def test_update_by_query_with_script(write_client, setup_ubq_tests): assert response.version_conflicts == 0 -async def test_delete_by_query_with_script(write_client, setup_ubq_tests): +async def test_delete_by_query_with_script(write_client, setup_ubq_tests) -> None: index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index 2ef87bd3..88b792db 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -14,12 +14,13 @@ import unittest import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy.helpers.test import OPENSEARCH_VERSION from .. 
import AsyncOpenSearchTestCase -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAlertingPlugin(AsyncOpenSearchTestCase): @@ -43,7 +44,7 @@ async def test_create_destination(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_get_destination(self): + async def test_get_destination(self) -> None: # Create a dummy destination await self.test_create_destination() @@ -123,7 +124,7 @@ async def test_create_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_search_monitor(self): + async def test_search_monitor(self) -> None: # Create a dummy monitor await self.test_create_monitor() @@ -141,7 +142,7 @@ async def test_search_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_get_monitor(self): + async def test_get_monitor(self) -> None: # Create a dummy monitor await self.test_create_monitor() @@ -165,7 +166,7 @@ async def test_get_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_run_monitor(self): + async def test_run_monitor(self) -> None: # Create a dummy monitor await self.test_create_monitor() diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py index d4379648..4f5fcfa1 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py @@ -12,12 +12,13 @@ from __future__ import unicode_literals import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy.exceptions import NotFoundError from .. 
import AsyncOpenSearchTestCase -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestIndexManagementPlugin(AsyncOpenSearchTestCase): @@ -68,7 +69,7 @@ class TestIndexManagementPlugin(AsyncOpenSearchTestCase): } } - async def test_create_policy(self): + async def test_create_policy(self) -> None: # Test to create policy response = await self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -77,7 +78,7 @@ async def test_create_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - async def test_get_policy(self): + async def test_get_policy(self) -> None: # Create a policy await self.test_create_policy() @@ -88,7 +89,7 @@ async def test_get_policy(self): self.assertIn("_id", response) self.assertEqual(response["_id"], self.POLICY_NAME) - async def test_update_policy(self): + async def test_update_policy(self) -> None: # Create a policy await self.test_create_policy() @@ -110,7 +111,7 @@ async def test_update_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - async def test_delete_policy(self): + async def test_delete_policy(self) -> None: # Create a policy await self.test_create_policy() diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index bb8509dc..0efcd25e 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -35,6 +35,7 @@ import warnings import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import OpenSearchWarning from opensearchpy.helpers.test import _get_version @@ -47,7 +48,7 @@ YamlRunner, ) -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio OPENSEARCH_VERSION = None @@ -77,7 +78,7 @@ async def setup(self): if self._setup_code: await self.run_code(self._setup_code) - async def teardown(self): + async def teardown(self) -> None: if self._teardown_code: self.section("teardown") await self.run_code(self._teardown_code) @@ -92,10 +93,10 @@ async def opensearch_version(self): OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 999 for v in version) return OPENSEARCH_VERSION - def section(self, name): + def section(self, name) -> None: print(("=" * 10) + " " + name + " " + ("=" * 10)) - async def run(self): + async def run(self) -> None: try: await self.setup() self.section("test") @@ -106,7 +107,7 @@ async def run(self): except Exception: pass - async def run_code(self, test): + async def run_code(self, test) -> None: """Execute an instruction based on its type.""" for action in test: assert len(action) == 1 @@ -118,7 +119,7 @@ async def run_code(self, test): else: raise RuntimeError("Invalid action type %r" % (action_type,)) - async def run_do(self, action): + async def run_do(self, action) -> None: api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) @@ -184,7 +185,7 @@ async def run_do(self, action): % (warn, caught_warnings) ) - async def run_skip(self, skip): + async def run_skip(self, skip) -> None: if "features" in skip: features = skip["features"] if not isinstance(features, (tuple, list)): @@ -204,7 +205,7 @@ async def run_skip(self, skip): if min_version <= (await self.opensearch_version()) <= max_version: pytest.skip(reason) - async def _feature_enabled(self, name): + async def _feature_enabled(self, name) -> bool: return False @@ -216,7 +217,7 @@ def 
async_runner(async_client): if RUN_ASYNC_REST_API_TESTS: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) - async def test_rest_api_spec(test_spec, async_runner): + async def test_rest_api_spec(test_spec, async_runner) -> None: if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") async_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 9b1f7a5f..9fe8d9d1 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -14,12 +14,13 @@ from unittest import IsolatedAsyncioTestCase import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.test import get_test_client from opensearchpy.connection.async_connections import add_connection from opensearchpy.exceptions import NotFoundError -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestSecurityPlugin(IsolatedAsyncioTestCase): @@ -40,17 +41,17 @@ class TestSecurityPlugin(IsolatedAsyncioTestCase): USER_NAME = "test-user" USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []} - async def asyncSetUp(self): + async def asyncSetUp(self) -> None: self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self): + async def asyncTearDown(self) -> None: if self.client: await self.client.close() - async def test_create_role(self): + async def test_create_role(self) -> None: # Test to create role response = await self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -59,7 +60,7 @@ async def test_create_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - async def test_create_role_with_body_param_empty(self): + async def test_create_role_with_body_param_empty(self) -> None: try: await self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as error: @@ -67,7 +68,7 @@ async def test_create_role_with_body_param_empty(self): else: assert False - async def test_get_role(self): + async def test_get_role(self) -> None: # Create a role await self.test_create_role() @@ -77,7 +78,7 @@ async def test_get_role(self): self.assertNotIn("errors", response) self.assertIn(self.ROLE_NAME, response) - async def test_update_role(self): + async def test_update_role(self) -> None: # Create a role await self.test_create_role() @@ -92,7 +93,7 @@ async def test_update_role(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - async def test_delete_role(self): + async def test_delete_role(self) -> None: # Create a role await self.test_create_role() @@ -105,7 +106,7 @@ async def test_delete_role(self): with self.assertRaises(NotFoundError): response = await self.client.security.get_role(self.ROLE_NAME) - async def test_create_user(self): + async def test_create_user(self) -> None: # Test to create user response = await self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -114,7 +115,7 @@ async def test_create_user(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - async def test_create_user_with_body_param_empty(self): + async def test_create_user_with_body_param_empty(self) -> None: try: await 
self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -137,7 +138,7 @@ async def test_create_user_with_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - async def test_get_user(self): + async def test_get_user(self) -> None: # Create a user await self.test_create_user() @@ -147,7 +148,7 @@ async def test_get_user(self): self.assertNotIn("errors", response) self.assertIn(self.USER_NAME, response) - async def test_update_user(self): + async def test_update_user(self) -> None: # Create a user await self.test_create_user() @@ -162,7 +163,7 @@ async def test_update_user(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - async def test_delete_user(self): + async def test_delete_user(self) -> None: # Create a user await self.test_create_user() @@ -175,12 +176,12 @@ async def test_delete_user(self): with self.assertRaises(NotFoundError): response = await self.client.security.get_user(self.USER_NAME) - async def test_health_check(self): + async def test_health_check(self) -> None: response = await self.client.security.health_check() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) - async def test_health(self): + async def test_health(self) -> None: response = await self.client.security.health() self.assertNotIn("errors", response) self.assertEqual("UP", response.get("status")) @@ -213,14 +214,14 @@ async def test_health(self): }, } - async def test_update_audit_config(self): + async def test_update_audit_config(self) -> None: response = await self.client.security.update_audit_config( body=self.AUDIT_CONFIG_SETTINGS ) self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - async def test_update_audit_configuration(self): + async def test_update_audit_configuration(self) -> None: response = await self.client.security.update_audit_configuration( body=self.AUDIT_CONFIG_SETTINGS ) diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 84458c9e..50d734bc 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -11,9 +11,10 @@ import uuid import pytest +from _pytest.mark.structures import MarkDecorator from mock import Mock -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAsyncSigner: @@ -30,7 +31,7 @@ def mock_session(self): return dummy_session - async def test_aws_signer_async_as_http_auth(self): + async def test_aws_signer_async_as_http_auth(self) -> None: region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -41,7 +42,7 @@ async def test_aws_signer_async_as_http_auth(self): assert "X-Amz-Date" in headers assert "X-Amz-Security-Token" in headers - async def test_aws_signer_async_when_region_is_null(self): + async def test_aws_signer_async_when_region_is_null(self) -> None: session = self.mock_session() from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -54,7 +55,7 @@ async def test_aws_signer_async_when_region_is_null(self): AWSV4SignerAsyncAuth(session, "") assert str(e.value) == "Region cannot be empty" - async def test_aws_signer_async_when_credentials_is_null(self): + async def test_aws_signer_async_when_credentials_is_null(self) -> None: region = "us-west-1" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -63,7 +64,7 @@ async def 
test_aws_signer_async_when_credentials_is_null(self): AWSV4SignerAsyncAuth(None, region) assert str(e.value) == "Credentials cannot be empty" - async def test_aws_signer_async_when_service_is_specified(self): + async def test_aws_signer_async_when_service_is_specified(self) -> None: region = "us-west-2" service = "aoss" @@ -78,7 +79,7 @@ async def test_aws_signer_async_when_service_is_specified(self): class TestAsyncSignerWithFrozenCredentials(TestAsyncSigner): - def mock_session(self, disable_get_frozen=True): + def mock_session(self, disable_get_frozen: bool = True): access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -90,7 +91,7 @@ def mock_session(self, disable_get_frozen=True): return dummy_session - async def test_aws_signer_async_frozen_credentials_as_http_auth(self): + async def test_aws_signer_async_frozen_credentials_as_http_auth(self) -> None: region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index fc018e43..4dabee05 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -30,8 +30,10 @@ import asyncio import json +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator from mock import patch from opensearchpy import AIOHttpConnection, AsyncTransport @@ -39,11 +41,11 @@ from opensearchpy.connection_pool import DummyConnectionPool from opensearchpy.exceptions import ConnectionError, TransportError -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class DummyConnection(Connection): - def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) @@ -52,7 +54,7 @@ def __init__(self, **kwargs): self.closed = False super(DummyConnection, self).__init__(**kwargs) - async def perform_request(self, *args, **kwargs): + async def perform_request(self, *args, **kwargs) -> Any: if self.closed: raise RuntimeError("This connection is closed") if self.delay: @@ -62,7 +64,7 @@ async def perform_request(self, *args, **kwargs): raise self.exception return self.status, self.headers, self.data - async def close(self): + async def close(self) -> None: if self.closed: raise RuntimeError("This connection is already closed") self.closed = True @@ -120,7 +122,7 @@ async def close(self): class TestTransport: - async def test_single_connection_uses_dummy_connection_pool(self): + async def test_single_connection_uses_dummy_connection_pool(self) -> None: t = AsyncTransport([{}]) await t._async_call() assert isinstance(t.connection_pool, DummyConnectionPool) @@ -128,7 +130,7 @@ async def test_single_connection_uses_dummy_connection_pool(self): await t._async_call() assert isinstance(t.connection_pool, DummyConnectionPool) - async def test_request_timeout_extracted_from_params_and_passed(self): + async def test_request_timeout_extracted_from_params_and_passed(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"request_timeout": 42}) @@ -140,7 +142,7 @@ async def test_request_timeout_extracted_from_params_and_passed(self): "headers": None, } == t.get_connection().calls[0][1] - async def test_timeout_extracted_from_params_and_passed(self): + async def 
test_timeout_extracted_from_params_and_passed(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"timeout": 84}) @@ -187,7 +189,7 @@ async def test_request_with_custom_user_agent_header(self): "headers": {"user-agent": "my-custom-value/1.2.3"}, } == t.get_connection().calls[0][1] - async def test_send_get_body_as_source(self): + async def test_send_get_body_as_source(self) -> None: t = AsyncTransport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -196,7 +198,7 @@ async def test_send_get_body_as_source(self): assert 1 == len(t.get_connection().calls) assert ("GET", "/", {"source": "{}"}, None) == t.get_connection().calls[0][0] - async def test_send_get_body_as_post(self): + async def test_send_get_body_as_post(self) -> None: t = AsyncTransport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -205,7 +207,7 @@ async def test_send_get_body_as_post(self): assert 1 == len(t.get_connection().calls) assert ("POST", "/", None, b"{}") == t.get_connection().calls[0][0] - async def test_body_gets_encoded_into_bytes(self): + async def test_body_gets_encoded_into_bytes(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好") @@ -217,7 +219,7 @@ async def test_body_gets_encoded_into_bytes(self): b"\xe4\xbd\xa0\xe5\xa5\xbd", ) == t.get_connection().calls[0][0] - async def test_body_bytes_get_passed_untouched(self): + async def test_body_bytes_get_passed_untouched(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" @@ -225,7 +227,7 @@ async def test_body_bytes_get_passed_untouched(self): assert 1 == len(t.get_connection().calls) assert ("GET", "/", None, body) == t.get_connection().calls[0][0] - async def test_body_surrogates_replaced_encoded_into_bytes(self): + async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好\uda6a") @@ -237,19 +239,19 @@ async def test_body_surrogates_replaced_encoded_into_bytes(self): b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa", ) == t.get_connection().calls[0][0] - async def test_kwargs_passed_on_to_connections(self): + async def test_kwargs_passed_on_to_connections(self) -> None: t = AsyncTransport([{"host": "google.com"}], port=123) await t._async_call() assert 1 == len(t.connection_pool.connections) assert "http://google.com:123" == t.connection_pool.connections[0].host - async def test_kwargs_passed_on_to_connection_pool(self): + async def test_kwargs_passed_on_to_connection_pool(self) -> None: dt = object() t = AsyncTransport([{}, {}], dead_timeout=dt) await t._async_call() assert dt is t.connection_pool.dead_timeout - async def test_custom_connection_class(self): + async def test_custom_connection_class(self) -> None: class MyConnection(object): def __init__(self, **kwargs): self.kwargs = kwargs @@ -259,14 +261,14 @@ def __init__(self, **kwargs): assert 1 == len(t.connection_pool.connections) assert isinstance(t.connection_pool.connections[0], MyConnection) - async def test_add_connection(self): + async def test_add_connection(self) -> None: t = AsyncTransport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) assert 2 == len(t.connection_pool.connections) assert "http://google.com:1234" == t.connection_pool.connections[1].host - async def test_request_will_fail_after_X_retries(self): + async 
def test_request_will_fail_after_X_retries(self) -> None: t = AsyncTransport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, @@ -281,7 +283,7 @@ async def test_request_will_fail_after_X_retries(self): assert connection_error assert 4 == len(t.get_connection().calls) - async def test_failed_connection_will_be_marked_as_dead(self): + async def test_failed_connection_will_be_marked_as_dead(self) -> None: t = AsyncTransport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, @@ -296,7 +298,9 @@ async def test_failed_connection_will_be_marked_as_dead(self): assert connection_error assert 0 == len(t.connection_pool.connections) - async def test_resurrected_connection_will_be_marked_as_live_on_success(self): + async def test_resurrected_connection_will_be_marked_as_live_on_success( + self, + ) -> None: for method in ("GET", "HEAD"): t = AsyncTransport([{}, {}], connection_class=DummyConnection) await t._async_call() @@ -309,7 +313,7 @@ async def test_resurrected_connection_will_be_marked_as_live_on_success(self): assert 1 == len(t.connection_pool.connections) assert 1 == len(t.connection_pool.dead_count) - async def test_sniff_will_use_seed_connections(self): + async def test_sniff_will_use_seed_connections(self) -> None: t = AsyncTransport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) await t._async_call() t.set_connections([{"data": "invalid"}]) @@ -318,7 +322,7 @@ async def test_sniff_will_use_seed_connections(self): assert 1 == len(t.connection_pool.connections) assert "http://1.1.1.1:123" == t.get_connection().host - async def test_sniff_on_start_fetches_and_uses_nodes_list(self): + async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -330,7 +334,7 @@ async def test_sniff_on_start_fetches_and_uses_nodes_list(self): assert 1 == len(t.connection_pool.connections) assert "http://1.1.1.1:123" == t.get_connection().host - async def test_sniff_on_start_ignores_sniff_timeout(self): + async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -344,7 +348,7 @@ async def test_sniff_on_start_ignores_sniff_timeout(self): 0 ].calls[0] - async def test_sniff_uses_sniff_timeout(self): + async def test_sniff_uses_sniff_timeout(self) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -412,7 +416,7 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts) assert 1 == len(conn_err.calls) assert 1 == len(conn_data.calls) - async def test_sniff_after_n_seconds(self, event_loop): + async def test_sniff_after_n_seconds(self, event_loop) -> None: t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, @@ -433,7 +437,7 @@ async def test_sniff_after_n_seconds(self, event_loop): assert "http://1.1.1.1:123" == t.get_connection().host assert event_loop.time() - 1 < t.last_sniff < event_loop.time() + 0.01 - async def test_sniff_7x_publish_host(self): + async def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
t = AsyncTransport( @@ -449,7 +453,7 @@ async def test_sniff_7x_publish_host(self): "port": 123, } - async def test_transport_close_closes_all_pool_connections(self): + async def test_transport_close_closes_all_pool_connections(self) -> None: t = AsyncTransport([{}], connection_class=DummyConnection) await t._async_call() @@ -464,7 +468,7 @@ async def test_transport_close_closes_all_pool_connections(self): await t.close() assert all([conn.closed for conn in t.connection_pool.connections]) - async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop): + async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop) -> None: t = AsyncTransport( [ {"data": ""}, @@ -544,7 +548,7 @@ async def test_sniff_on_start_close_unlocks_async_calls(self, event_loop): # A lot quicker than 10 seconds defined in 'delay' assert duration < 1 - async def test_init_connection_pool_with_many_hosts(self): + async def test_init_connection_pool_with_many_hosts(self) -> None: """ Check init of connection pool with multiple connections. @@ -562,7 +566,7 @@ async def test_init_connection_pool_with_many_hosts(self): assert len(t.connection_pool.connections) == amt_hosts await t._async_call() - async def test_init_pool_with_connection_class_to_many_hosts(self): + async def test_init_pool_with_connection_class_to_many_hosts(self) -> None: """ Check init of connection pool with user specified connection_class. diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index 2a5ad5a3..ad795bcf 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -34,7 +34,7 @@ class DummyTransport(object): - def __init__(self, hosts, responses=None, **kwargs): + def __init__(self, hosts, responses=None, **kwargs) -> None: self.hosts = hosts self.responses = responses self.call_count = 0 @@ -50,14 +50,14 @@ def perform_request(self, method, url, params=None, headers=None, body=None): class OpenSearchTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super(OpenSearchTestCase, self).setUp() self.client = OpenSearch(transport_class=DummyTransport) - def assert_call_count_equals(self, count): + def assert_call_count_equals(self, count) -> None: self.assertEqual(count, self.client.transport.call_count) - def assert_url_called(self, method, url, count=1): + def assert_url_called(self, method, url, count: int = 1): self.assertIn((method, url), self.client.transport.calls) calls = self.client.transport.calls[(method, url)] self.assertEqual(count, len(calls)) @@ -65,13 +65,13 @@ def assert_url_called(self, method, url, count=1): class TestOpenSearchTestCase(OpenSearchTestCase): - def test_our_transport_used(self): + def test_our_transport_used(self) -> None: self.assertIsInstance(self.client.transport, DummyTransport) - def test_start_with_0_call(self): + def test_start_with_0_call(self) -> None: self.assert_call_count_equals(0) - def test_each_call_is_recorded(self): + def test_each_call_is_recorded(self) -> None: self.client.transport.perform_request("GET", "/") self.client.transport.perform_request("DELETE", "/42", params={}, body="body") self.assert_call_count_equals(2) diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py index ecbd769a..3174772e 100644 --- a/test_opensearchpy/test_client/__init__.py +++ b/test_opensearchpy/test_client/__init__.py @@ -30,19 +30,20 @@ import warnings -from opensearchpy.client import OpenSearch, _normalize_hosts +from opensearchpy.client import OpenSearch +from 
opensearchpy.client.utils import _normalize_hosts
 
 from ..test_cases import OpenSearchTestCase, TestCase
 
 
 class TestNormalizeHosts(TestCase):
-    def test_none_uses_defaults(self):
+    def test_none_uses_defaults(self) -> None:
         self.assertEqual([{}], _normalize_hosts(None))
 
-    def test_strings_are_used_as_hostnames(self):
+    def test_strings_are_used_as_hostnames(self) -> None:
         self.assertEqual([{"host": "elastic.co"}], _normalize_hosts(["elastic.co"]))
 
-    def test_strings_are_parsed_for_port_and_user(self):
+    def test_strings_are_parsed_for_port_and_user(self) -> None:
         self.assertEqual(
             [
                 {"host": "elastic.co", "port": 42},
@@ -51,7 +52,7 @@
             _normalize_hosts(["elastic.co:42", "user:secre%5D@elastic.co"]),
         )
 
-    def test_strings_are_parsed_for_scheme(self):
+    def test_strings_are_parsed_for_scheme(self) -> None:
         self.assertEqual(
             [
                 {"host": "elastic.co", "port": 42, "use_ssl": True},
@@ -68,23 +69,23 @@
             ),
         )
 
-    def test_dicts_are_left_unchanged(self):
+    def test_dicts_are_left_unchanged(self) -> None:
         self.assertEqual(
             [{"host": "local", "extra": 123}],
             _normalize_hosts([{"host": "local", "extra": 123}]),
         )
 
-    def test_single_string_is_wrapped_in_list(self):
+    def test_single_string_is_wrapped_in_list(self) -> None:
         self.assertEqual([{"host": "elastic.co"}], _normalize_hosts("elastic.co"))
 
 
 class TestClient(OpenSearchTestCase):
-    def test_request_timeout_is_passed_through_unescaped(self):
+    def test_request_timeout_is_passed_through_unescaped(self) -> None:
         self.client.ping(request_timeout=0.1)
         calls = self.assert_url_called("HEAD", "/")
         self.assertEqual([({"request_timeout": 0.1}, {}, None)], calls)
 
-    def test_params_is_copied_when(self):
+    def test_params_is_copied_when(self) -> None:
         rt = object()
         params = dict(request_timeout=rt)
         self.client.ping(params=params)
@@ -96,7 +97,7 @@
         )
         self.assertFalse(calls[0][0] is calls[1][0])
 
-    def test_headers_is_copied_when(self):
+    def test_headers_is_copied_when(self) -> None:
         hv = "value"
         headers = dict(Authentication=hv)
         self.client.ping(headers=headers)
@@ -108,40 +109,40 @@
         )
         self.assertFalse(calls[0][0] is calls[1][0])
 
-    def test_from_in_search(self):
+    def test_from_in_search(self) -> None:
         self.client.search(index="i", from_=10)
         calls = self.assert_url_called("POST", "/i/_search")
         self.assertEqual([({"from": "10"}, {}, None)], calls)
 
-    def test_repr_contains_hosts(self):
+    def test_repr_contains_hosts(self) -> None:
         self.assertEqual("<OpenSearch([{}])>", repr(self.client))
 
-    def test_repr_subclass(self):
+    def test_repr_subclass(self) -> None:
         class OtherOpenSearch(OpenSearch):
             pass
 
         self.assertEqual("<OtherOpenSearch([{}])>", repr(OtherOpenSearch()))
 
-    def test_repr_contains_hosts_passed_in(self):
+    def test_repr_contains_hosts_passed_in(self) -> None:
         self.assertIn("opensearchpy.org", repr(OpenSearch(["opensearchpy.org:123"])))
 
-    def test_repr_truncates_host_to_5(self):
+    def test_repr_truncates_host_to_5(self) -> None:
         hosts = [{"host": "opensearch" + str(i)} for i in range(10)]
         client = OpenSearch(hosts)
         self.assertNotIn("opensearch5", repr(client))
         self.assertIn("...", repr(client))
 
-    def test_index_uses_post_if_id_is_empty(self):
+    def test_index_uses_post_if_id_is_empty(self) -> None:
         self.client.index(index="my-index", id="", body={})
         self.assert_url_called("POST", "/my-index/_doc")
 
-    def test_index_uses_put_if_id_is_not_empty(self):
+    def test_index_uses_put_if_id_is_not_empty(self) -> None:
self.client.index(index="my-index", id=0, body={}) self.assert_url_called("PUT", "/my-index/_doc/0") - def test_tasks_get_without_task_id_deprecated(self): + def test_tasks_get_without_task_id_deprecated(self) -> None: warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get() @@ -155,7 +156,7 @@ def test_tasks_get_without_task_id_deprecated(self): "and will be removed in v8.0. Use client.tasks.list() instead.", ) - def test_tasks_get_with_task_id_not_deprecated(self): + def test_tasks_get_with_task_id_not_deprecated(self) -> None: warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get("task-1") diff --git a/test_opensearchpy/test_client/test_cluster.py b/test_opensearchpy/test_client/test_cluster.py index a66072cd..f170a448 100644 --- a/test_opensearchpy/test_client/test_cluster.py +++ b/test_opensearchpy/test_client/test_cluster.py @@ -30,18 +30,18 @@ class TestCluster(OpenSearchTestCase): - def test_stats_without_node_id(self): + def test_stats_without_node_id(self) -> None: self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") - def test_stats_with_node_id(self): + def test_stats_with_node_id(self) -> None: self.client.cluster.stats("node-1") self.assert_url_called("GET", "/_cluster/stats/nodes/node-1") self.client.cluster.stats(node_id="node-2") self.assert_url_called("GET", "/_cluster/stats/nodes/node-2") - def test_state_with_index_without_metric_defaults_to_all(self): + def test_state_with_index_without_metric_defaults_to_all(self) -> None: self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") diff --git a/test_opensearchpy/test_client/test_indices.py b/test_opensearchpy/test_client/test_indices.py index f3e48f1b..668eebd7 100644 --- a/test_opensearchpy/test_client/test_indices.py +++ b/test_opensearchpy/test_client/test_indices.py @@ -30,19 +30,19 @@ class TestIndices(OpenSearchTestCase): - def test_create_one_index(self): + def test_create_one_index(self) -> None: self.client.indices.create("test-index") self.assert_url_called("PUT", "/test-index") - def test_delete_multiple_indices(self): + def test_delete_multiple_indices(self) -> None: self.client.indices.delete(["test-index", "second.index", "third/index"]) self.assert_url_called("DELETE", "/test-index,second.index,third%2Findex") - def test_exists_index(self): + def test_exists_index(self) -> None: self.client.indices.exists("second.index,third/index") self.assert_url_called("HEAD", "/second.index,third%2Findex") - def test_passing_empty_value_for_required_param_raises_exception(self): + def test_passing_empty_value_for_required_param_raises_exception(self) -> None: self.assertRaises(ValueError, self.client.indices.exists, index=None) self.assertRaises(ValueError, self.client.indices.exists, index=[]) self.assertRaises(ValueError, self.client.indices.exists, index="") diff --git a/test_opensearchpy/test_client/test_overrides.py b/test_opensearchpy/test_client/test_overrides.py index 4ce0931e..16cb3ab4 100644 --- a/test_opensearchpy/test_client/test_overrides.py +++ b/test_opensearchpy/test_client/test_overrides.py @@ -32,57 +32,57 @@ class TestOverriddenUrlTargets(OpenSearchTestCase): - def test_create(self): + def test_create(self) -> None: self.client.create(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_create/test-id") - def test_delete(self): + def test_delete(self) -> None: 
self.client.delete(index="test-index", id="test-id") self.assert_url_called("DELETE", "/test-index/_doc/test-id") - def test_exists(self): + def test_exists(self) -> None: self.client.exists(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_doc/test-id") - def test_explain(self): + def test_explain(self) -> None: self.client.explain(index="test-index", id="test-id") self.assert_url_called("POST", "/test-index/_explain/test-id") - def test_get(self): + def test_get(self) -> None: self.client.get(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_doc/test-id") - def test_get_source(self): + def test_get_source(self) -> None: self.client.get_source(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_source/test-id") - def test_exists_source(self): + def test_exists_source(self) -> None: self.client.exists_source(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_source/test-id") - def test_index(self): + def test_index(self) -> None: self.client.index(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_doc") self.client.index(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_doc/test-id") - def test_termvectors(self): + def test_termvectors(self) -> None: self.client.termvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_termvectors") self.client.termvectors(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_termvectors/test-id") - def test_mtermvectors(self): + def test_mtermvectors(self) -> None: self.client.mtermvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_mtermvectors") - def test_update(self): + def test_update(self) -> None: self.client.update(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_update/test-id") - def test_cluster_state(self): + def test_cluster_state(self) -> None: self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") @@ -92,20 +92,20 @@ def test_cluster_state(self): self.client.cluster.state(index="test-index", metric="test-metric") self.assert_url_called("GET", "/_cluster/state/test-metric/test-index") - def test_cluster_stats(self): + def test_cluster_stats(self) -> None: self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") self.client.cluster.stats(node_id="test-node") self.assert_url_called("GET", "/_cluster/stats/nodes/test-node") - def test_indices_put_mapping(self): + def test_indices_put_mapping(self) -> None: self.client.indices.put_mapping(body={}) self.assert_url_called("PUT", "/_all/_mapping") self.client.indices.put_mapping(index="test-index", body={}) self.assert_url_called("PUT", "/test-index/_mapping") - def test_tasks_get(self): + def test_tasks_get(self) -> None: with pytest.warns(DeprecationWarning): self.client.tasks.get() diff --git a/test_opensearchpy/test_client/test_plugins/test_alerting.py b/test_opensearchpy/test_client/test_plugins/test_alerting.py index a59ad04e..482a4224 100644 --- a/test_opensearchpy/test_client/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_client/test_plugins/test_alerting.py @@ -12,41 +12,41 @@ class TestAlerting(OpenSearchTestCase): - def test_create_monitor(self): + def test_create_monitor(self) -> None: # Test Post Method self.client.alerting.create_monitor({}) self.assert_url_called("POST", "/_plugins/_alerting/monitors") - def test_run_monitor(self): + 
def test_run_monitor(self) -> None: self.client.alerting.run_monitor("...") self.assert_url_called("POST", "/_plugins/_alerting/monitors/.../_execute") - def test_get_monitor(self): + def test_get_monitor(self) -> None: # Test Get Method self.client.alerting.get_monitor("...") self.assert_url_called("GET", "/_plugins/_alerting/monitors/...") - def test_search_monitor(self): + def test_search_monitor(self) -> None: # Test Search Method self.client.alerting.search_monitor({}) self.assert_url_called("GET", "/_plugins/_alerting/monitors/_search") - def test_update_monitor(self): + def test_update_monitor(self) -> None: # Test Update Method self.client.alerting.update_monitor("...") self.assert_url_called("PUT", "/_plugins/_alerting/monitors/...") - def test_delete_monitor(self): + def test_delete_monitor(self) -> None: # Test Delete Method self.client.alerting.delete_monitor("...") self.assert_url_called("DELETE", "/_plugins/_alerting/monitors/...") - def test_create_destination(self): + def test_create_destination(self) -> None: # Test Post Method self.client.alerting.create_destination({}) self.assert_url_called("POST", "/_plugins/_alerting/destinations") - def test_get_destination(self): + def test_get_destination(self) -> None: # Test Get Method # Get a specific destination @@ -57,21 +57,21 @@ def test_get_destination(self): self.client.alerting.get_destination() self.assert_url_called("GET", "/_plugins/_alerting/destinations") - def test_update_destination(self): + def test_update_destination(self) -> None: # Test Update Method self.client.alerting.update_destination("...") self.assert_url_called("PUT", "/_plugins/_alerting/destinations/...") - def test_delete_destination(self): + def test_delete_destination(self) -> None: # Test Delete Method self.client.alerting.delete_destination("...") self.assert_url_called("DELETE", "/_plugins/_alerting/destinations/...") - def test_get_alerts(self): + def test_get_alerts(self) -> None: self.client.alerting.get_alerts() self.assert_url_called("GET", "/_plugins/_alerting/monitors/alerts") - def test_acknowledge_alerts(self): + def test_acknowledge_alerts(self) -> None: self.client.alerting.acknowledge_alert("...") self.assert_url_called( "POST", "/_plugins/_alerting/monitors/.../_acknowledge/alerts" diff --git a/test_opensearchpy/test_client/test_plugins/test_index_management.py b/test_opensearchpy/test_client/test_plugins/test_index_management.py index 2c744e19..891d6f02 100644 --- a/test_opensearchpy/test_client/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_client/test_plugins/test_index_management.py @@ -12,11 +12,11 @@ class TestIndexManagement(OpenSearchTestCase): - def test_create_policy(self): + def test_create_policy(self) -> None: self.client.index_management.put_policy("...") self.assert_url_called("PUT", "/_plugins/_ism/policies/...") - def test_update_policy(self): + def test_update_policy(self) -> None: self.client.index_management.put_policy( "...", params={"if_seq_no": 7, "if_primary_term": 1} ) @@ -25,33 +25,33 @@ def test_update_policy(self): self.assert_url_called("PUT", "/_plugins/_ism/policies/..."), ) - def test_add_policy(self): + def test_add_policy(self) -> None: self.client.index_management.add_policy("...") self.assert_url_called("POST", "/_plugins/_ism/add/...") - def test_get_policy(self): + def test_get_policy(self) -> None: self.client.index_management.get_policy("...") self.assert_url_called("GET", "/_plugins/_ism/policies/...") - def test_remove_policy_from_index(self): + def 
test_remove_policy_from_index(self): + def
test_remove_policy_from_index(self) -> None: self.client.index_management.remove_policy_from_index("...") self.assert_url_called("POST", "/_plugins/_ism/remove/...") - def test_change_policy(self): + def test_change_policy(self) -> None: self.client.index_management.change_policy("...") self.assert_url_called("POST", "/_plugins/_ism/change_policy/...") - def test_retry(self): + def test_retry(self) -> None: self.client.index_management.retry("...") self.assert_url_called("POST", "/_plugins/_ism/retry/...") - def test_explain_index(self): + def test_explain_index(self) -> None: self.client.index_management.explain_index("...", show_policy=True) self.assertEqual( [({"show_policy": b"true"}, {}, None)], self.assert_url_called("GET", "/_plugins/_ism/explain/..."), ) - def test_delete_policy(self): + def test_delete_policy(self) -> None: self.client.index_management.delete_policy("...") self.assert_url_called("DELETE", "/_plugins/_ism/policies/...") diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py index e717d9cb..d09731bf 100644 --- a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -14,7 +14,7 @@ class TestPluginsClient(TestCase): - def test_plugins_client(self): + def test_plugins_client(self) -> None: with self.assertWarns(Warning) as w: client = OpenSearch() client.plugins.__init__(client) # double-init diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index 6ce12a46..30940ce4 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -12,36 +12,36 @@ class TestPointInTime(OpenSearchTestCase): - def test_create_one_point_in_time(self): + def test_create_one_point_in_time(self) -> None: index_name = "test-index" self.client.create_point_in_time(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") - def test_delete_one_point_in_time(self): + def test_delete_one_point_in_time(self) -> None: self.client.delete_point_in_time(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") - def test_delete_all_point_in_time(self): + def test_delete_all_point_in_time(self) -> None: self.client.delete_point_in_time(all=True) self.assert_url_called("DELETE", "/_search/point_in_time/_all") - def test_list_all_point_in_time(self): + def test_list_all_point_in_time(self) -> None: self.client.list_all_point_in_time() self.assert_url_called("GET", "/_search/point_in_time/_all") - def test_create_pit(self): + def test_create_pit(self) -> None: index_name = "test-index" self.client.create_pit(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") - def test_delete_pit(self): + def test_delete_pit(self) -> None: self.client.delete_pit(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") - def test_delete_all_pits(self): + def test_delete_all_pits(self) -> None: self.client.delete_all_pits() self.assert_url_called("DELETE", "/_search/point_in_time/_all") - def test_get_all_pits(self): + def test_get_all_pits(self) -> None: self.client.get_all_pits() self.assert_url_called("GET", "/_search/point_in_time/_all") diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py index 92265733..a9bfc894 
100644 --- a/test_opensearchpy/test_client/test_remote_store.py +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -11,6 +11,6 @@ class TestRemoteStore(OpenSearchTestCase): - def test_remote_store_restore(self): + def test_remote_store_restore(self) -> None: self.client.remote_store.restore(body=["index-1"]) self.assert_url_called("POST", "/_remotestore/_restore") diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py index 3caf8d5f..66ec8cbc 100644 --- a/test_opensearchpy/test_client/test_requests.py +++ b/test_opensearchpy/test_client/test_requests.py @@ -14,7 +14,7 @@ class TestRequests(TestCase): - def test_connection_class(self): + def test_connection_class(self) -> None: client = OpenSearch(connection_class=RequestsHttpConnection) self.assertEqual(client.transport.pool_maxsize, None) self.assertEqual(client.transport.connection_class, RequestsHttpConnection) @@ -22,7 +22,7 @@ def test_connection_class(self): client.transport.connection_pool.connections[0], RequestsHttpConnection ) - def test_pool_maxsize(self): + def test_pool_maxsize(self) -> None: client = OpenSearch(connection_class=RequestsHttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) self.assertEqual( diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py index fa63133b..064c49cc 100644 --- a/test_opensearchpy/test_client/test_urllib3.py +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -16,12 +16,12 @@ class TestUrlLib3(TestCase): - def test_default(self): + def test_default(self) -> None: client = OpenSearch() self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertEqual(client.transport.pool_maxsize, None) - def test_connection_class(self): + def test_connection_class(self) -> None: client = OpenSearch(connection_class=Urllib3HttpConnection) self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) self.assertIsInstance( @@ -31,7 +31,7 @@ def test_connection_class(self): client.transport.connection_pool.connections[0].pool, HTTPConnectionPool ) - def test_pool_maxsize(self): + def test_pool_maxsize(self) -> None: client = OpenSearch(connection_class=Urllib3HttpConnection, pool_maxsize=42) self.assertEqual(client.transport.pool_maxsize, 42) # https://github.com/python/cpython/blob/3.12/Lib/queue.py#L35 diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index 888e988d..b6a034eb 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -34,14 +34,14 @@ class TestQueryParams(TestCase): - def setup_method(self, _): + def setup_method(self, _) -> None: self.calls = [] @query_params("simple_param") - def func_to_wrap(self, *args, **kwargs): + def func_to_wrap(self, *args, **kwargs) -> None: self.calls.append((args, kwargs)) - def test_handles_params(self): + def test_handles_params(self) -> None: self.func_to_wrap(params={"simple_param_2": "2"}, simple_param="3") self.assertEqual( self.calls, @@ -56,19 +56,19 @@ def test_handles_params(self): ], ) - def test_handles_headers(self): + def test_handles_headers(self) -> None: self.func_to_wrap(headers={"X-Opaque-Id": "app-1"}) self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "app-1"}})] ) - def test_handles_opaque_id(self): + def test_handles_opaque_id(self) -> None: self.func_to_wrap(opaque_id="request-id") self.assertEqual( self.calls, [((), {"params": 
{}, "headers": {"x-opaque-id": "request-id"}})] ) - def test_handles_empty_none_and_normalization(self): + def test_handles_empty_none_and_normalization(self) -> None: self.func_to_wrap(params=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) @@ -84,7 +84,7 @@ def test_handles_empty_none_and_normalization(self): self.func_to_wrap(headers={"X": "y"}) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {"x": "y"}})) - def test_non_escaping_params(self): + def test_non_escaping_params(self) -> None: # the query_params decorator doesn't validate "timeout" it simply avoids escaping as it did self.func_to_wrap(simple_param="x", timeout="4s") self.assertEqual( @@ -109,7 +109,7 @@ def test_non_escaping_params(self): ), ) - def test_per_call_authentication(self): + def test_per_call_authentication(self) -> None: self.func_to_wrap(api_key=("name", "key")) self.assertEqual( self.calls[-1], @@ -154,7 +154,7 @@ def test_per_call_authentication(self): class TestMakePath(TestCase): - def test_handles_unicode(self): + def test_handles_unicode(self) -> None: id = "中文" self.assertEqual( "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) @@ -162,36 +162,36 @@ def test_handles_unicode(self): class TestEscape(TestCase): - def test_handles_ascii(self): + def test_handles_ascii(self) -> None: string = "abc123" self.assertEqual(b"abc123", _escape(string)) - def test_handles_unicode(self): + def test_handles_unicode(self) -> None: string = "中文" self.assertEqual(b"\xe4\xb8\xad\xe6\x96\x87", _escape(string)) - def test_handles_bytestring(self): + def test_handles_bytestring(self) -> None: string = b"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0" self.assertEqual(string, _escape(string)) class TestBulkBody(TestCase): - def test_proper_bulk_body_as_string_is_not_modified(self): + def test_proper_bulk_body_as_string_is_not_modified(self) -> None: string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(string_body, _bulk_body(None, string_body)) - def test_proper_bulk_body_as_bytestring_is_not_modified(self): + def test_proper_bulk_body_as_bytestring_is_not_modified(self) -> None: bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(bytestring_body, _bulk_body(None, bytestring_body)) - def test_bulk_body_as_string_adds_trailing_newline(self): + def test_bulk_body_as_string_adds_trailing_newline(self) -> None: string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', _bulk_body(None, string_body), ) - def test_bulk_body_as_bytestring_adds_trailing_newline(self): + def test_bulk_body_as_bytestring_adds_trailing_newline(self) -> None: bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py index 2c0a3fef..6ba12d0d 100644 --- a/test_opensearchpy/test_connection/test_base_connection.py +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -46,7 +46,7 @@ class TestBaseConnection(TestCase): - def test_empty_warnings(self): + def test_empty_warnings(self) -> None: con = Connection() with warnings.catch_warnings(record=True) as w: con._raise_warnings(()) @@ -54,7 +54,7 @@ def test_empty_warnings(self): self.assertEqual(w, []) - def 
test_raises_warnings(self): + def test_raises_warnings(self) -> None: con = Connection() with warnings.catch_warnings(record=True) as warn: @@ -76,7 +76,7 @@ def test_raises_warnings(self): ["this is also deprecated", "guess what? deprecated"], ) - def test_raises_warnings_when_folded(self): + def test_raises_warnings_when_folded(self) -> None: con = Connection() with warnings.catch_warnings(record=True) as warn: con._raise_warnings( @@ -99,7 +99,7 @@ def test_ipv6_host_and_port(self): conn = Connection(**kwargs) assert conn.host == expected_host - def test_compatibility_accept_header(self): + def test_compatibility_accept_header(self) -> None: try: conn = Connection() assert "accept" not in conn.headers @@ -119,29 +119,29 @@ def test_compatibility_accept_header(self): finally: os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") - def test_ca_certs_ssl_cert_file(self): + def test_ca_certs_ssl_cert_file(self) -> None: cert = "/path/to/clientcert.pem" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_FILE", cert) assert Connection.default_ca_certs() == cert - def test_ca_certs_ssl_cert_dir(self): + def test_ca_certs_ssl_cert_dir(self) -> None: cert = "/path/to/clientcert/dir" with MonkeyPatch().context() as monkeypatch: monkeypatch.setenv("SSL_CERT_DIR", cert) assert Connection.default_ca_certs() == cert - def test_ca_certs_certifi(self): + def test_ca_certs_certifi(self) -> None: import certifi assert Connection.default_ca_certs() == certifi.where() - def test_no_ca_certs(self): + def test_no_ca_certs(self) -> None: with MonkeyPatch().context() as monkeypatch: monkeypatch.setitem(sys.modules, "certifi", None) assert Connection.default_ca_certs() is None - def test_default_connection_is_returned_by_default(self): + def test_default_connection_is_returned_by_default(self) -> None: c = connections.Connections() con, con2 = object(), object() @@ -151,7 +151,7 @@ def test_default_connection_is_returned_by_default(self): assert c.get_connection() is con - def test_get_connection_created_connection_if_needed(self): + def test_get_connection_created_connection_if_needed(self) -> None: c = connections.Connections() c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -166,7 +166,7 @@ def test_get_connection_created_connection_if_needed(self): assert [{"host": "opensearch.com"}] == default.transport.hosts assert [{"host": "localhost"}] == local.transport.hosts - def test_configure_preserves_unchanged_connections(self): + def test_configure_preserves_unchanged_connections(self) -> None: c = connections.Connections() c.configure( @@ -184,7 +184,7 @@ def test_configure_preserves_unchanged_connections(self): assert new_local is local assert new_default is not default - def test_remove_connection_removes_both_conn_and_conf(self): + def test_remove_connection_removes_both_conn_and_conf(self) -> None: c = connections.Connections() c.configure( @@ -200,14 +200,14 @@ def test_remove_connection_removes_both_conn_and_conf(self): c.get_connection("local2") c.get_connection("default") - def test_create_connection_constructs_client(self): + def test_create_connection_constructs_client(self) -> None: c = connections.Connections() c.create_connection("testing", hosts=["opensearch.com"]) con = c.get_connection("testing") assert [{"host": "opensearch.com"}] == con.transport.hosts - def test_create_connection_adds_our_serializer(self): + def test_create_connection_adds_our_serializer(self) -> None: c = connections.Connections() c.create_connection("testing", 
hosts=["opensearch.com"]) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index c85d2efd..409981f0 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -49,7 +49,7 @@ class TestRequestsHttpConnection(TestCase): def _get_mock_connection( - self, connection_params={}, status_code=200, response_body=b"{}" + self, connection_params={}, status_code: int = 200, response_body: bytes = b"{}" ): con = RequestsHttpConnection(**connection_params) @@ -80,21 +80,21 @@ def _get_request(self, connection, *args, **kwargs): self.assertEqual(1, len(args)) return args[0] - def test_custom_http_auth_is_allowed(self): + def test_custom_http_auth_is_allowed(self) -> None: auth = AuthBase() c = RequestsHttpConnection(http_auth=auth) self.assertEqual(auth, c.session.auth) - def test_timeout_set(self): + def test_timeout_set(self) -> None: con = RequestsHttpConnection(timeout=42) self.assertEqual(42, con.timeout) - def test_opaque_id(self): + def test_opaque_id(self) -> None: con = RequestsHttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") - def test_no_http_compression(self): + def test_no_http_compression(self) -> None: con = self._get_mock_connection() self.assertFalse(con.http_compress) @@ -106,7 +106,7 @@ def test_no_http_compression(self): self.assertNotIn("content-encoding", req.headers) self.assertNotIn("accept-encoding", req.headers) - def test_http_compression(self): + def test_http_compression(self) -> None: con = self._get_mock_connection( {"http_compress": True}, ) @@ -129,7 +129,7 @@ def test_http_compression(self): self.assertNotIn("content-encoding", req.headers) self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") - def test_uses_https_if_verify_certs_is_off(self): + def test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( {"use_ssl": True, "url_prefix": "url", "verify_certs": False} @@ -146,20 +146,20 @@ def test_uses_https_if_verify_certs_is_off(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_uses_given_ca_certs(self): + def test_uses_given_ca_certs(self) -> None: path = "/path/to/my/ca_certs.pem" c = RequestsHttpConnection(ca_certs=path) self.assertEqual(path, c.session.verify) - def test_uses_default_ca_certs(self): + def test_uses_default_ca_certs(self) -> None: c = RequestsHttpConnection() self.assertEqual(Connection.default_ca_certs(), c.session.verify) - def test_uses_no_ca_certs(self): + def test_uses_no_ca_certs(self) -> None: c = RequestsHttpConnection(verify_certs=False) self.assertFalse(c.session.verify) - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): + def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( { @@ -177,7 +177,7 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_merge_headers(self): + def test_merge_headers(self) -> None: con = self._get_mock_connection( connection_params={"headers": {"h1": "v1", "h2": "v2"}} ) @@ -186,13 +186,13 @@ def test_merge_headers(self): self.assertEqual(req.headers["h2"], "v2p") self.assertEqual(req.headers["h3"], "v3") - def test_default_headers(self): + def 
test_default_headers(self): + def test_default_headers(self) -> None: con = self._get_mock_connection() req = self._get_request(con, "GET", "/") self.assertEqual(req.headers["content-type"], "application/json") self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) - def test_custom_headers(self): + def test_custom_headers(self) -> None: con = self._get_mock_connection() req = self._get_request( con, @@ -206,45 +206,45 @@ def test_custom_headers(self): self.assertEqual(req.headers["content-type"], "application/x-ndjson") self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") - def test_http_auth(self): + def test_http_auth(self) -> None: con = RequestsHttpConnection(http_auth="username:secret") self.assertEqual(("username", "secret"), con.session.auth) - def test_http_auth_tuple(self): + def test_http_auth_tuple(self) -> None: con = RequestsHttpConnection(http_auth=("username", "secret")) self.assertEqual(("username", "secret"), con.session.auth) - def test_http_auth_list(self): + def test_http_auth_list(self) -> None: con = RequestsHttpConnection(http_auth=["username", "secret"]) self.assertEqual(("username", "secret"), con.session.auth) - def test_repr(self): + def test_repr(self) -> None: con = self._get_mock_connection({"host": "opensearchpy.com", "port": 443}) self.assertEqual( "<RequestsHttpConnection: http://opensearchpy.com:443>", repr(con) ) - def test_conflict_error_is_returned_on_409(self): + def test_conflict_error_is_returned_on_409(self) -> None: con = self._get_mock_connection(status_code=409) self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") - def test_not_found_error_is_returned_on_404(self): + def test_not_found_error_is_returned_on_404(self) -> None: con = self._get_mock_connection(status_code=404) self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") - def test_request_error_is_returned_on_400(self): + def test_request_error_is_returned_on_400(self) -> None: con = self._get_mock_connection(status_code=400) self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") @patch("opensearchpy.connection.base.logger") - def test_head_with_404_doesnt_get_logged(self, logger): + def test_head_with_404_doesnt_get_logged(self, logger) -> None: con = self._get_mock_connection(status_code=404) self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") self.assertEqual(0, logger.warning.call_count) @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") - def test_failed_request_logs_and_traces(self, logger, tracer): + def test_failed_request_logs_and_traces(self, logger, tracer) -> None: con = self._get_mock_connection( response_body=b'{"answer": 42}', status_code=500 ) @@ -272,7 +272,7 @@ def test_failed_request_logs_and_traces(self, logger, tracer): @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") - def test_success_logs_and_traces(self, logger, tracer): + def test_success_logs_and_traces(self, logger, tracer) -> None: con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") status, headers, data = con.perform_request( "GET", @@ -311,7 +311,7 @@ def test_success_logs_and_traces(self, logger, tracer): self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): + def test_uncompressed_body_logged(self, logger) -> None: con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@
-333,7 +333,7 @@ def test_uncompressed_body_logged(self, logger): self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) - def test_defaults(self): + def test_defaults(self) -> None: con = self._get_mock_connection() request = self._get_request(con, "GET", "/") @@ -341,7 +341,7 @@ def test_defaults(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_params_properly_encoded(self): + def test_params_properly_encoded(self) -> None: con = self._get_mock_connection() request = self._get_request( con, "GET", "/", params={"param": "value with spaces"} @@ -351,7 +351,7 @@ def test_params_properly_encoded(self): self.assertEqual("GET", request.method) self.assertEqual(None, request.body) - def test_body_attached(self): + def test_body_attached(self) -> None: con = self._get_mock_connection() request = self._get_request(con, "GET", "/", body='{"answer": 42}') @@ -359,14 +359,14 @@ def test_body_attached(self): self.assertEqual("GET", request.method) self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) - def test_http_auth_attached(self): + def test_http_auth_attached(self) -> None: con = self._get_mock_connection({"http_auth": "username:secret"}) request = self._get_request(con, "GET", "/") self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") @patch("opensearchpy.connection.base.tracer") - def test_url_prefix(self, tracer): + def test_url_prefix(self, tracer) -> None: con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) request = self._get_request( con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 @@ -383,13 +383,13 @@ def test_url_prefix(self, tracer): tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], ) - def test_surrogatepass_into_bytes(self): + def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip - def test_recursion_error_reraised(self): + def test_recursion_error_reraised(self) -> None: conn = RequestsHttpConnection() def send_raise(*_, **__): @@ -413,7 +413,7 @@ def mock_session(self): return dummy_session - def test_aws_signer_as_http_auth(self): + def test_aws_signer_as_http_auth(self) -> None: region = "us-west-2" import requests @@ -430,7 +430,7 @@ def test_aws_signer_as_http_auth(self): self.assertIn("X-Amz-Security-Token", prepared_request.headers) self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) - def test_aws_signer_when_service_is_specified(self): + def test_aws_signer_when_service_is_specified(self) -> None: region = "us-west-1" service = "aoss" @@ -448,7 +448,7 @@ def test_aws_signer_when_service_is_specified(self): self.assertIn("X-Amz-Security-Token", prepared_request.headers) @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") - def test_aws_signer_signs_with_query_string(self, mock_sign): + def test_aws_signer_signs_with_query_string(self, mock_sign) -> None: region = "us-west-1" service = "aoss" @@ -470,7 +470,7 @@ def test_aws_signer_signs_with_query_string(self, mock_sign): class TestRequestsConnectionRedirect: @classmethod - def setup_class(cls): + def setup_class(cls) -> None: # Start servers cls.server1 = TestHTTPServer(port=8080) cls.server1.start() @@ -478,20 +478,20 @@ def setup_class(cls): cls.server2.start() @classmethod - def teardown_class(cls): + def 
teardown_class(cls): + def
teardown_class(cls) -> None: # Stop servers cls.server2.stop() cls.server1.stop() # allow_redirects = False - def test_redirect_failure_when_allow_redirect_false(self): + def test_redirect_failure_when_allow_redirect_false(self) -> None: conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) with pytest.raises(TransportError) as e: conn.perform_request("GET", "/redirect", allow_redirects=False) assert e.value.status_code == 302 # allow_redirects = True (Default) - def test_redirect_success_when_allow_redirect_true(self): + def test_redirect_success_when_allow_redirect_true(self) -> None: conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) user_agent = conn._get_default_user_agent() status, headers, data = conn.perform_request("GET", "/redirect") @@ -517,7 +517,9 @@ def mock_session(self): return dummy_session - def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth(self): + def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth( + self, + ) -> None: region = "us-west-2" import requests diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index 929258fd..c87d8ac0 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -45,7 +45,7 @@ class TestUrllib3HttpConnection(TestCase): - def _get_mock_connection(self, connection_params={}, response_body=b"{}"): + def _get_mock_connection(self, connection_params={}, response_body: bytes = b"{}"): con = Urllib3HttpConnection(**connection_params) def _dummy_urlopen(*args, **kwargs): @@ -59,7 +59,7 @@ def _dummy_urlopen(*args, **kwargs): con.pool.urlopen = _dummy_urlopen return con - def test_ssl_context(self): + def test_ssl_context(self) -> None: try: context = ssl.create_default_context() except AttributeError: @@ -75,11 +75,11 @@ def test_ssl_context(self): self.assertIsInstance(con.pool.conn_kw["ssl_context"], ssl.SSLContext) self.assertTrue(con.use_ssl) - def test_opaque_id(self): + def test_opaque_id(self) -> None: con = Urllib3HttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") - def test_no_http_compression(self): + def test_no_http_compression(self) -> None: con = self._get_mock_connection() self.assertFalse(con.http_compress) self.assertNotIn("accept-encoding", con.headers) @@ -92,7 +92,7 @@ def test_no_http_compression(self): self.assertNotIn("accept-encoding", kwargs["headers"]) self.assertNotIn("content-encoding", kwargs["headers"]) - def test_http_compression(self): + def test_http_compression(self) -> None: con = self._get_mock_connection({"http_compress": True}) self.assertTrue(con.http_compress) self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") @@ -119,18 +119,18 @@ def test_http_compression(self): self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") self.assertNotIn("content-encoding", kwargs["headers"]) - def test_default_user_agent(self): + def test_default_user_agent(self) -> None: con = Urllib3HttpConnection() self.assertEqual( con._get_default_user_agent(), "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()), ) - def test_timeout_set(self): + def test_timeout_set(self) -> None: con = Urllib3HttpConnection(timeout=42) self.assertEqual(42, con.timeout) - def test_keep_alive_is_on_by_default(self): + def test_keep_alive_is_on_by_default(self) -> None: con = 
Urllib3HttpConnection() self.assertEqual( { @@ -141,7 +141,7 @@ def test_keep_alive_is_on_by_default(self): con.headers, ) - def test_http_auth(self): + def test_http_auth(self) -> None: con = Urllib3HttpConnection(http_auth="username:secret") self.assertEqual( { @@ -153,7 +153,7 @@ def test_http_auth(self): con.headers, ) - def test_http_auth_tuple(self): + def test_http_auth_tuple(self) -> None: con = Urllib3HttpConnection(http_auth=("username", "secret")) self.assertEqual( { @@ -165,7 +165,7 @@ def test_http_auth_tuple(self): con.headers, ) - def test_http_auth_list(self): + def test_http_auth_list(self) -> None: con = Urllib3HttpConnection(http_auth=["username", "secret"]) self.assertEqual( { @@ -181,7 +181,7 @@ def test_http_auth_list(self): "urllib3.HTTPConnectionPool.urlopen", return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), ) - def test_aws_signer_as_http_auth_adds_headers(self, mock_open): + def test_aws_signer_as_http_auth_adds_headers(self, mock_open) -> None: from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") @@ -197,7 +197,7 @@ def test_aws_signer_as_http_auth_adds_headers(self, mock_open): self.assertIn("X-Amz-Security-Token", headers) self.assertIn("X-Amz-Content-SHA256", headers) - def test_aws_signer_as_http_auth(self): + def test_aws_signer_as_http_auth(self) -> None: region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -209,7 +209,7 @@ def test_aws_signer_as_http_auth(self): self.assertIn("X-Amz-Security-Token", headers) self.assertIn("X-Amz-Content-SHA256", headers) - def test_aws_signer_when_region_is_null(self): + def test_aws_signer_when_region_is_null(self) -> None: session = self.mock_session() from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -222,7 +222,7 @@ def test_aws_signer_when_region_is_null(self): Urllib3AWSV4SignerAuth(session, "") assert str(e.value) == "Region cannot be empty" - def test_aws_signer_when_credentials_is_null(self): + def test_aws_signer_when_credentials_is_null(self) -> None: region = "us-west-1" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth @@ -235,7 +235,7 @@ def test_aws_signer_when_credentials_is_null(self): Urllib3AWSV4SignerAuth("", region) assert str(e.value) == "Credentials cannot be empty" - def test_aws_signer_when_service_is_specified(self): + def test_aws_signer_when_service_is_specified(self) -> None: region = "us-west-1" service = "aoss" @@ -259,7 +259,7 @@ def mock_session(self): return dummy_session - def test_uses_https_if_verify_certs_is_off(self): + def test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertEqual(1, len(w)) @@ -270,7 +270,7 @@ def test_uses_https_if_verify_certs_is_off(self): self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): + def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -279,17 +279,17 @@ def test_nowarn_when_uses_https_if_verify_certs_is_off(self): self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - def test_doesnt_use_https_if_not_specified(self): + def test_doesnt_use_https_if_not_specified(self) -> None: con = Urllib3HttpConnection() self.assertIsInstance(con.pool, 
urllib3.HTTPConnectionPool) - def test_no_warning_when_using_ssl_context(self): + def test_no_warning_when_using_ssl_context(self) -> None: ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: Urllib3HttpConnection(ssl_context=ctx) self.assertEqual(0, len(w)) - def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): + def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, @@ -311,21 +311,21 @@ def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): str(w[0].message), ) - def test_uses_given_ca_certs(self): + def test_uses_given_ca_certs(self) -> None: path = "/path/to/my/ca_certs.pem" c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) self.assertEqual(path, c.pool.ca_certs) - def test_uses_default_ca_certs(self): + def test_uses_default_ca_certs(self) -> None: c = Urllib3HttpConnection(use_ssl=True) self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) - def test_uses_no_ca_certs(self): + def test_uses_no_ca_certs(self) -> None: c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertIsNone(c.pool.ca_certs) @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): + def test_uncompressed_body_logged(self, logger) -> None: con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -335,13 +335,13 @@ def test_uncompressed_body_logged(self, logger): self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) self.assertEqual("< {}", resp[0][0] % resp[0][1:]) - def test_surrogatepass_into_bytes(self): + def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) # fmt: skip - def test_recursion_error_reraised(self): + def test_recursion_error_reraised(self) -> None: conn = Urllib3HttpConnection() def urlopen_raise(*_, **__): @@ -367,7 +367,9 @@ def mock_session(self): return dummy_session - def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth(self): + def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth( + self, + ) -> None: region = "us-west-2" from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index f08b6f24..5630030e 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -40,16 +40,16 @@ class TestConnectionPool(TestCase): - def test_dummy_cp_raises_exception_on_more_connections(self): + def test_dummy_cp_raises_exception_on_more_connections(self) -> None: self.assertRaises(ImproperlyConfigured, DummyConnectionPool, []) self.assertRaises( ImproperlyConfigured, DummyConnectionPool, [object(), object()] ) - def test_raises_exception_when_no_connections_defined(self): + def test_raises_exception_when_no_connections_defined(self) -> None: self.assertRaises(ImproperlyConfigured, ConnectionPool, []) - def test_default_round_robin(self): + def test_default_round_robin(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) connections = set() @@ -83,7 +83,7 @@ def select(self, connections): connections.append(pool.get_connection()) self.assertEqual(connections, [x * x for x in range(100)]) - def 
test_dead_nodes_are_removed_from_active_connections(self): + def
test_dead_nodes_are_removed_from_active_connections(self): + def test_dead_nodes_are_removed_from_active_connections(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -92,7 +92,7 @@ def test_dead_nodes_are_removed_from_active_connections(self): self.assertEqual(1, pool.dead.qsize()) self.assertEqual((now + 60, 42), pool.dead.get()) - def test_connection_is_skipped_when_dead(self): + def test_connection_is_skipped_when_dead(self) -> None: pool = ConnectionPool([(x, {}) for x in range(2)]) pool.mark_dead(0) @@ -101,7 +101,7 @@ def test_connection_is_skipped_when_dead(self): [pool.get_connection(), pool.get_connection(), pool.get_connection()], ) - def test_new_connection_is_not_marked_dead(self): + def test_new_connection_is_not_marked_dead(self) -> None: # Create 10 connections pool = ConnectionPool([(Connection(), {}) for _ in range(10)]) @@ -112,7 +112,9 @@ def test_new_connection_is_not_marked_dead(self): # Nothing should be marked dead self.assertEqual(0, len(pool.dead_count)) - def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible(self): + def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible( + self, + ) -> None: pool = ConnectionPool([(x, {}) for x in range(2)]) pool.dead_count[0] = 1 pool.mark_dead(0) # failed twice, longer timeout @@ -122,7 +124,7 @@ def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible(self self.assertEqual(1, pool.get_connection()) self.assertEqual([1], pool.connections) - def test_connection_is_resurrected_after_its_timeout(self): + def test_connection_is_resurrected_after_its_timeout(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -131,7 +133,7 @@ def test_connection_is_resurrected_after_its_timeout(self): self.assertEqual(42, pool.connections[-1]) self.assertEqual(100, len(pool.connections)) - def test_force_resurrect_always_returns_a_connection(self): + def test_force_resurrect_always_returns_a_connection(self) -> None: pool = ConnectionPool([(0, {})]) pool.connections = [] @@ -139,7 +141,7 @@ def test_force_resurrect_always_returns_a_connection(self): self.assertEqual([], pool.connections) self.assertTrue(pool.dead.empty()) - def test_already_failed_connection_has_longer_timeout(self): + def test_already_failed_connection_has_longer_timeout(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 @@ -148,7 +150,7 @@ def test_already_failed_connection_has_longer_timeout(self): self.assertEqual(3, pool.dead_count[42]) self.assertEqual((now + 4 * 60, 42), pool.dead.get()) - def test_timeout_for_failed_connections_is_limitted(self): + def test_timeout_for_failed_connections_is_limitted(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 245 @@ -157,7 +159,7 @@ def test_timeout_for_failed_connections_is_limitted(self): self.assertEqual(246, pool.dead_count[42]) self.assertEqual((now + 32 * 60, 42), pool.dead.get()) - def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self): + def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 diff --git a/test_opensearchpy/test_exceptions.py b/test_opensearchpy/test_exceptions.py index 0b4150fb..26e9e044 100644 --- a/test_opensearchpy/test_exceptions.py +++ b/test_opensearchpy/test_exceptions.py @@ -32,7 +32,7 @@ class 
TestTransformError(TestCase): - def test_transform_error_parse_with_error_reason(self): + def test_transform_error_parse_with_error_reason(self) -> None: e = TransportError( 500, "InternalServerError", @@ -43,7 +43,7 @@ def test_transform_error_parse_with_error_reason(self): str(e), "TransportError(500, 'InternalServerError', 'error reason')" ) - def test_transform_error_parse_with_error_string(self): + def test_transform_error_parse_with_error_string(self) -> None: e = TransportError( 500, "InternalServerError", {"error": "something error message"} ) diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index 3538ae28..35b78d9a 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -60,7 +60,7 @@ class TestParallelBulk(TestCase): "opensearchpy.helpers.actions._process_bulk_chunk", side_effect=mock_process_bulk_chunk, ) - def test_all_chunks_sent(self, _process_bulk_chunk): + def test_all_chunks_sent(self, _process_bulk_chunk) -> None: actions = ({"x": i} for i in range(100)) list(helpers.parallel_bulk(OpenSearch(), actions, chunk_size=2)) @@ -74,7 +74,7 @@ def test_all_chunks_sent(self, _process_bulk_chunk): (True, time.sleep(0.001) or threading.current_thread().ident) ], ) - def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): + def test_chunk_sent_from_different_threads(self, _process_bulk_chunk) -> None: actions = ({"x": i} for i in range(100)) results = list( helpers.parallel_bulk(OpenSearch(), actions, thread_count=10, chunk_size=2) @@ -83,10 +83,10 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): class TestChunkActions(TestCase): - def setup_method(self, _): + def setup_method(self, _) -> None: self.actions = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip - def test_expand_action(self): + def test_expand_action(self) -> None: self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) self.assertEqual( helpers.expand_action({"key": "val"}), ({"index": {}}, {"key": "val"}) @@ -123,7 +123,7 @@ def test_expand_action_actions(self): ({"create": {"_id": "id", "_index": "index"}}, {"key": "val"}), ) - def test_expand_action_options(self): + def test_expand_action_options(self) -> None: for option in ( "_id", "_index", @@ -182,7 +182,7 @@ def test__source_metadata_or_source(self): ({"update": {}}, {"key2": "val2"}), ) - def test_chunks_are_chopped_by_byte_size(self): + def test_chunks_are_chopped_by_byte_size(self) -> None: self.assertEqual( 100, len( @@ -190,7 +190,7 @@ def test_chunks_are_chopped_by_byte_size(self): ), ) - def test_chunks_are_chopped_by_chunk_size(self): + def test_chunks_are_chopped_by_chunk_size(self) -> None: self.assertEqual( 10, len( @@ -200,7 +200,7 @@ def test_chunks_are_chopped_by_chunk_size(self): ), ) - def test_chunks_are_chopped_by_byte_size_properly(self): + def test_chunks_are_chopped_by_byte_size_properly(self) -> None: max_byte_size = 170 chunks = list( helpers._chunk_actions( @@ -215,7 +215,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self): class TestExpandActions(TestCase): - def test_string_actions_are_marked_as_simple_inserts(self): + def test_string_actions_are_marked_as_simple_inserts(self) -> None: self.assertEqual( ('{"index":{}}', "whatever"), helpers.expand_action("whatever") ) diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 057e7f16..f46dd132 100644 --- 
a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -30,7 +30,7 @@ from opensearchpy.helpers import aggs, query -def test_repr(): +def test_repr() -> None: max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -50,7 +50,7 @@ def test_meta(): } == a.to_dict() -def test_meta_from_dict(): +def test_meta_from_dict() -> None: max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -59,7 +59,7 @@ def test_meta_from_dict(): assert aggs.A(a.to_dict()) == a -def test_A_creates_proper_agg(): +def test_A_creates_proper_agg() -> None: a = aggs.A("terms", field="tags") assert isinstance(a, aggs.Terms) @@ -74,7 +74,7 @@ def test_A_handles_nested_aggs_properly(): assert a._params == {"field": "tags", "aggs": {"max_score": max_score}} -def test_A_passes_aggs_through(): +def test_A_passes_aggs_through() -> None: a = aggs.A("terms", field="tags") assert aggs.A(a) is a @@ -115,14 +115,14 @@ def test_A_fails_with_incorrect_dict(): aggs.A(d) -def test_A_fails_with_agg_and_params(): +def test_A_fails_with_agg_and_params() -> None: a = aggs.A("terms", field="tags") with raises(Exception): aggs.A(a, field="score") -def test_buckets_are_nestable(): +def test_buckets_are_nestable() -> None: a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -131,7 +131,7 @@ def test_buckets_are_nestable(): assert a.aggs == {"per_author": b} -def test_metric_inside_buckets(): +def test_metric_inside_buckets() -> None: a = aggs.Terms(field="tags") b = a.metric("max_score", "max", field="score") @@ -140,7 +140,7 @@ def test_metric_inside_buckets(): assert a.aggs["max_score"] == aggs.Max(field="score") -def test_buckets_equals_counts_subaggs(): +def test_buckets_equals_counts_subaggs() -> None: a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") b = aggs.Terms(field="tags") @@ -166,7 +166,7 @@ def test_buckets_to_dict(): } == a.to_dict() -def test_nested_buckets_are_reachable_as_getitem(): +def test_nested_buckets_are_reachable_as_getitem() -> None: a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -174,14 +174,14 @@ def test_nested_buckets_are_reachable_as_getitem(): assert a["per_author"] == b -def test_nested_buckets_are_settable_as_getitem(): +def test_nested_buckets_are_settable_as_getitem() -> None: a = aggs.Terms(field="tags") b = a["per_author"] = aggs.A("terms", field="author.raw") assert a.aggs["per_author"] is b -def test_filter_can_be_instantiated_using_positional_args(): +def test_filter_can_be_instantiated_using_positional_args() -> None: a = aggs.Filter(query.Q("term", f=42)) assert {"filter": {"term": {"f": 42}}} == a.to_dict() @@ -209,7 +209,7 @@ def test_filter_aggregation_with_nested_aggs(): } == a.to_dict() -def test_filters_correctly_identifies_the_hash(): +def test_filters_correctly_identifies_the_hash() -> None: a = aggs.A( "filters", filters={ @@ -266,19 +266,19 @@ def test_bucket_sort_agg_only_trnunc(): } == a.to_dict() -def test_geohash_grid_aggregation(): +def test_geohash_grid_aggregation() -> None: a = aggs.GeohashGrid(**{"field": "centroid", "precision": 3}) assert {"geohash_grid": {"field": "centroid", "precision": 3}} == a.to_dict() -def test_geotile_grid_aggregation(): +def test_geotile_grid_aggregation() -> None: a = aggs.GeotileGrid(**{"field": "centroid", "precision": 3}) assert {"geotile_grid": {"field": "centroid", "precision": 
3}} == a.to_dict() -def test_boxplot_aggregation(): +def test_boxplot_aggregation() -> None: a = aggs.Boxplot(field="load_time") assert {"boxplot": {"field": "load_time"}} == a.to_dict() @@ -305,12 +305,12 @@ def test_rare_terms_aggregation(): } == a.to_dict() -def test_variable_width_histogram_aggregation(): +def test_variable_width_histogram_aggregation() -> None: a = aggs.VariableWidthHistogram(field="price", buckets=2) assert {"variable_width_histogram": {"buckets": 2, "field": "price"}} == a.to_dict() -def test_median_absolute_deviation_aggregation(): +def test_median_absolute_deviation_aggregation() -> None: a = aggs.MedianAbsoluteDeviation(field="rating") assert {"median_absolute_deviation": {"field": "rating"}} == a.to_dict() @@ -359,7 +359,7 @@ def test_moving_percentiles_aggregation(): } == a.to_dict() -def test_normalize_aggregation(): +def test_normalize_aggregation() -> None: a = aggs.Normalize(buckets_path="normalized", method="percent_of_sum") assert { "normalize": {"buckets_path": "normalized", "method": "percent_of_sum"} diff --git a/test_opensearchpy/test_helpers/test_analysis.py b/test_opensearchpy/test_helpers/test_analysis.py index 49a1d1fd..7b8f6b04 100644 --- a/test_opensearchpy/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_helpers/test_analysis.py @@ -30,7 +30,7 @@ from opensearchpy.helpers import analysis -def test_analyzer_serializes_as_name(): +def test_analyzer_serializes_as_name() -> None: a = analysis.analyzer("my_analyzer") assert "my_analyzer" == a.to_dict() @@ -144,7 +144,7 @@ def test_conditional_token_filter(): } == a.get_analysis_definition() -def test_conflicting_nested_filters_cause_error(): +def test_conflicting_nested_filters_cause_error() -> None: a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -166,7 +166,7 @@ def test_conflicting_nested_filters_cause_error(): a.get_analysis_definition() -def test_normalizer_serializes_as_name(): +def test_normalizer_serializes_as_name() -> None: n = analysis.normalizer("my_normalizer") assert "my_normalizer" == n.to_dict() @@ -184,7 +184,7 @@ def test_normalizer_has_definition(): } == n.get_definition() -def test_tokenizer(): +def test_tokenizer() -> None: t = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) assert t.to_dict() == "trigram" @@ -218,7 +218,7 @@ def test_custom_analyzer_can_collect_custom_items(): } == a.get_analysis_definition() -def test_stemmer_analyzer_can_pass_name(): +def test_stemmer_analyzer_can_pass_name() -> None: t = analysis.token_filter( "my_english_filter", name="minimal_english", type="stemmer" ) diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index d2da16e0..ed78b4c0 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -129,7 +129,7 @@ class Index: name = "test-host" -def test_range_serializes_properly(): +def test_range_serializes_properly() -> None: class D(document.Document): lr = field.LongRange() @@ -142,7 +142,7 @@ class D(document.Document): assert {"lr": {"lt": 42}} == d.to_dict() -def test_range_deserializes_properly(): +def test_range_deserializes_properly() -> None: class D(document.InnerDoc): lr = field.LongRange() @@ -152,13 +152,13 @@ class D(document.InnerDoc): assert 47 not in d.lr -def test_resolve_nested(): +def test_resolve_nested() -> None: nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is 
NestedSecret._doc_type.mapping["secrets"]["title"] -def test_conflicting_mapping_raises_error_in_index_to_dict(): +def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: class A(document.Document): name = field.Text() @@ -173,18 +173,18 @@ class B(document.Document): i.to_dict() -def test_ip_address_serializes_properly(): +def test_ip_address_serializes_properly() -> None: host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() -def test_matches_uses_index(): +def test_matches_uses_index() -> None: assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) -def test_matches_with_no_name_always_matches(): +def test_matches_with_no_name_always_matches() -> None: class D(document.Document): pass @@ -192,7 +192,7 @@ class D(document.Document): assert D._matches({"_index": "whatever"}) -def test_matches_accepts_wildcards(): +def test_matches_accepts_wildcards() -> None: class MyDoc(document.Document): class Index: name = "my-*" @@ -201,7 +201,7 @@ class Index: assert not MyDoc._matches({"_index": "not-my-index"}) -def test_assigning_attrlist_to_field(): +def test_assigning_attrlist_to_field() -> None: sc = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -209,13 +209,13 @@ def test_assigning_attrlist_to_field(): assert sc.to_dict()["files"] is ls -def test_optional_inner_objects_are_not_validated_if_missing(): +def test_optional_inner_objects_are_not_validated_if_missing() -> None: d = OptionalObjectWithRequiredField() assert d.full_clean() is None -def test_custom_field(): +def test_custom_field() -> None: s = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s.to_dict() @@ -226,13 +226,13 @@ def test_custom_field(): assert isinstance(s.title, Secret) -def test_custom_field_mapping(): +def test_custom_field_mapping() -> None: assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() -def test_custom_field_in_nested(): +def test_custom_field_in_nested() -> None: s = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -240,7 +240,7 @@ def test_custom_field_in_nested(): assert s.secrets[0].title == "Hello" -def test_multi_works_after_doc_has_been_saved(): +def test_multi_works_after_doc_has_been_saved() -> None: c = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -248,7 +248,7 @@ def test_multi_works_after_doc_has_been_saved(): assert c.to_dict() == {"files": ["setup.py"]} -def test_multi_works_in_nested_after_doc_has_been_serialized(): +def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: # Issue #359 c = DocWithNested(comments=[Comment(title="First!")]) @@ -257,7 +257,7 @@ def test_multi_works_in_nested_after_doc_has_been_serialized(): assert [] == c.comments[0].tags -def test_null_value_for_object(): +def test_null_value_for_object() -> None: d = MyDoc(inner=None) assert d.inner is None @@ -313,21 +313,21 @@ def test_to_dict_with_meta_includes_custom_index(): assert {"_index": "other-index", "_source": {"title": "hello"}} == d.to_dict(True) -def test_to_dict_without_skip_empty_will_include_empty_fields(): +def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: d = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() assert {"tags": [], "title": None, "inner": {}} == d.to_dict(skip_empty=False) -def test_attribute_can_be_removed(): +def test_attribute_can_be_removed() -> None: d = MyDoc(title="hello") del d.title assert 
"title" not in d._d_ -def test_doc_type_can_be_correctly_pickled(): +def test_doc_type_can_be_correctly_pickled() -> None: d = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -342,7 +342,7 @@ def test_doc_type_can_be_correctly_pickled(): assert isinstance(d2.comments[0], Comment) -def test_meta_is_accessible_even_on_empty_doc(): +def test_meta_is_accessible_even_on_empty_doc() -> None: d = MyDoc() d.meta @@ -369,7 +369,7 @@ class Meta: } == User._doc_type.mapping.to_dict() -def test_multi_value_fields(): +def test_multi_value_fields() -> None: class Blog(document.Document): tags = field.Keyword(multi=True) @@ -380,7 +380,7 @@ class Blog(document.Document): assert ["search", "python"] == b.tags -def test_docs_with_properties(): +def test_docs_with_properties() -> None: class User(document.Document): pwd_hash = field.Text() @@ -408,7 +408,7 @@ def password(self, pwd): u.password -def test_nested_can_be_assigned_to(): +def test_nested_can_be_assigned_to() -> None: d1 = DocWithNested(comments=[Comment(title="First!")]) d2 = DocWithNested() @@ -419,13 +419,13 @@ def test_nested_can_be_assigned_to(): assert isinstance(d2.comments[0], Comment) -def test_nested_can_be_none(): +def test_nested_can_be_none() -> None: d = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() -def test_nested_defaults_to_list_and_can_be_updated(): +def test_nested_defaults_to_list_and_can_be_updated() -> None: md = DocWithNested() assert [] == md.comments @@ -446,7 +446,7 @@ def test_to_dict_is_recursive_and_can_cope_with_multi_values(): } == md.to_dict() -def test_to_dict_ignores_empty_collections(): +def test_to_dict_ignores_empty_collections() -> None: md = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() @@ -500,7 +500,7 @@ def test_document_can_be_created_dynamically(): } == md.to_dict() -def test_invalid_date_will_raise_exception(): +def test_invalid_date_will_raise_exception() -> None: md = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): @@ -539,7 +539,7 @@ class B(A): } == B._doc_type.mapping.to_dict() -def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict(): +def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: md = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" @@ -566,32 +566,32 @@ def test_index_inheritance(): } == MyMultiSubDoc._doc_type.mapping.to_dict() -def test_meta_fields_can_be_set_directly_in_init(): +def test_meta_fields_can_be_set_directly_in_init() -> None: p = object() md = MyDoc(_id=p, title="Hello World!") assert md.meta.id is p -def test_save_no_index(mock_client): +def test_save_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): md.save(using="mock") -def test_delete_no_index(mock_client): +def test_delete_no_index(mock_client) -> None: md = MyDoc() with raises(ValidationException): md.delete(using="mock") -def test_update_no_fields(): +def test_update_no_fields() -> None: md = MyDoc() with raises(IllegalOperation): md.update() -def test_search_with_custom_alias_and_index(mock_client): +def test_search_with_custom_alias_and_index(mock_client) -> None: search_object = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py index 9fcc68d1..e663bca1 100644 --- 
a/test_opensearchpy/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_helpers/test_faceted_search.py @@ -49,7 +49,7 @@ class BlogSearch(FacetedSearch): } -def test_query_is_created_properly(): +def test_query_is_created_properly() -> None: bs = BlogSearch("python search") s = bs.build_search() @@ -153,7 +153,7 @@ def test_filters_are_applied_to_search_ant_relevant_facets(): } == d -def test_date_histogram_facet_with_1970_01_01_date(): +def test_date_histogram_facet_with_1970_01_01_date() -> None: dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) @@ -186,7 +186,7 @@ def test_date_histogram_facet_with_1970_01_01_date(): ("fixed_interval", "1h"), ], ) -def test_date_histogram_interval_types(interval_type, interval): +def test_date_histogram_interval_types(interval_type, interval) -> None: dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -198,7 +198,7 @@ def test_date_histogram_interval_types(interval_type, interval): dhf.get_value_filter(datetime.now()) -def test_date_histogram_no_interval_keyerror(): +def test_date_histogram_no_interval_keyerror() -> None: dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index df30ad69..19582730 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -36,7 +36,7 @@ from opensearchpy.helpers import field -def test_date_range_deserialization(): +def test_date_range_deserialization() -> None: data = {"lt": "2018-01-01T00:30:10"} r = field.DateRange().deserialize(data) @@ -45,7 +45,7 @@ def test_date_range_deserialization(): assert r.lt == datetime(2018, 1, 1, 0, 30, 10) -def test_boolean_deserialization(): +def test_boolean_deserialization() -> None: bf = field.Boolean() assert not bf.deserialize("false") @@ -58,7 +58,7 @@ def test_boolean_deserialization(): assert bf.deserialize(1) -def test_date_field_can_have_default_tz(): +def test_date_field_can_have_default_tz() -> None: f = field.Date(default_timezone="UTC") now = datetime.now() @@ -73,7 +73,7 @@ def test_date_field_can_have_default_tz(): assert now.isoformat() + "+00:00" == now_with_tz.isoformat() -def test_custom_field_car_wrap_other_field(): +def test_custom_field_car_wrap_other_field() -> None: class MyField(field.CustomField): @property def builtin_type(self): @@ -84,7 +84,7 @@ def builtin_type(self): ).to_dict() -def test_field_from_dict(): +def test_field_from_dict() -> None: f = field.construct_field({"type": "text", "index": "not_analyzed"}) assert isinstance(f, field.Text) @@ -107,14 +107,14 @@ def test_multi_fields_are_accepted_and_parsed(): } == f.to_dict() -def test_nested_provides_direct_access_to_its_fields(): +def test_nested_provides_direct_access_to_its_fields() -> None: f = field.Nested(properties={"name": {"type": "text", "index": "not_analyzed"}}) assert "name" in f assert f["name"] == field.Text(index="not_analyzed") -def test_field_supports_multiple_analyzers(): +def test_field_supports_multiple_analyzers() -> None: f = field.Text(analyzer="snowball", search_analyzer="keyword") assert { "analyzer": "snowball", @@ -143,14 +143,14 @@ def test_multifield_supports_multiple_analyzers(): } == f.to_dict() -def test_scaled_float(): +def test_scaled_float() 
-> None: with pytest.raises(TypeError): field.ScaledFloat() f = field.ScaledFloat(123) assert f.to_dict() == {"scaling_factor": 123, "type": "scaled_float"} -def test_ipaddress(): +def test_ipaddress() -> None: f = field.Ip() assert f.deserialize("127.0.0.1") == ip_address("127.0.0.1") assert f.deserialize("::1") == ip_address("::1") @@ -160,7 +160,7 @@ def test_ipaddress(): assert f.deserialize("not_an_ipaddress") -def test_float(): +def test_float() -> None: f = field.Float() assert f.deserialize("42") == 42.0 assert f.deserialize(None) is None @@ -168,7 +168,7 @@ def test_float(): assert f.deserialize("not_a_float") -def test_integer(): +def test_integer() -> None: f = field.Integer() assert f.deserialize("42") == 42 assert f.deserialize(None) is None @@ -176,30 +176,30 @@ def test_integer(): assert f.deserialize("not_an_integer") -def test_binary(): +def test_binary() -> None: f = field.Binary() assert f.deserialize(base64.b64encode(b"42")) == b"42" assert f.deserialize(f.serialize(b"42")) == b"42" assert f.deserialize(None) is None -def test_constant_keyword(): +def test_constant_keyword() -> None: f = field.ConstantKeyword() assert f.to_dict() == {"type": "constant_keyword"} -def test_rank_features(): +def test_rank_features() -> None: f = field.RankFeatures() assert f.to_dict() == {"type": "rank_features"} -def test_object_dynamic_values(): +def test_object_dynamic_values() -> None: for dynamic in True, False, "strict": f = field.Object(dynamic=dynamic) assert f.to_dict()["dynamic"] == dynamic -def test_object_disabled(): +def test_object_disabled() -> None: f = field.Object(enabled=False) assert f.to_dict() == {"type": "object", "enabled": False} diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index 7163c09e..bb8aa578 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -38,7 +38,7 @@ class Post(Document): published_from = Date() -def test_multiple_doc_types_will_combine_mappings(): +def test_multiple_doc_types_will_combine_mappings() -> None: class User(Document): username = Text() @@ -56,14 +56,14 @@ class User(Document): } == i.to_dict() -def test_search_is_limited_to_index_name(): +def test_search_is_limited_to_index_name() -> None: i = Index("my-index") s = i.search() assert s._index == ["my-index"] -def test_cloned_index_has_copied_settings_and_using(): +def test_cloned_index_has_copied_settings_and_using() -> None: client = object() i = Index("my-index", using=client) i.settings(number_of_shards=1) @@ -76,7 +76,7 @@ def test_cloned_index_has_copied_settings_and_using(): assert i._settings is not i2._settings -def test_cloned_index_has_analysis_attribute(): +def test_cloned_index_has_analysis_attribute() -> None: """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. 
@@ -96,7 +96,7 @@ def test_cloned_index_has_analysis_attribute(): assert i.to_dict()["settings"]["analysis"] == i2.to_dict()["settings"]["analysis"] -def test_settings_are_saved(): +def test_settings_are_saved() -> None: i = Index("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -104,7 +104,7 @@ def test_settings_are_saved(): assert {"settings": {"number_of_shards": 1, "number_of_replicas": 0}} == i.to_dict() -def test_registered_doc_type_included_in_to_dict(): +def test_registered_doc_type_included_in_to_dict() -> None: i = Index("i", using="alias") i.document(Post) @@ -118,7 +118,7 @@ def test_registered_doc_type_included_in_to_dict(): } == i.to_dict() -def test_registered_doc_type_included_in_search(): +def test_registered_doc_type_included_in_search() -> None: i = Index("i", using="alias") i.document(Post) @@ -127,7 +127,7 @@ def test_registered_doc_type_included_in_search(): assert s._doc_type == [Post] -def test_aliases_add_to_object(): +def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -137,7 +137,7 @@ def test_aliases_add_to_object(): assert index._aliases == alias_dict -def test_aliases_returned_from_to_dict(): +def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) alias_dict = {random_alias: {}} @@ -176,7 +176,7 @@ def test_analyzers_returned_from_to_dict(): ] == {"filter": ["standard"], "type": "custom", "tokenizer": "standard"} -def test_conflicting_analyzer_raises_error(): +def test_conflicting_analyzer_raises_error() -> None: i = Index("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) @@ -191,7 +191,7 @@ def test_index_template_can_have_order(): assert {"index_patterns": ["i-*"], "order": 2} == it.to_dict() -def test_index_template_save_result(mock_client): +def test_index_template_save_result(mock_client) -> None: it = IndexTemplate("test-template", "test-*") assert it.save(using="mock") == mock_client.indices.put_template() diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py index ad042c58..5e4e49ce 100644 --- a/test_opensearchpy/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_helpers/test_mapping.py @@ -31,7 +31,7 @@ from opensearchpy.helpers import analysis, mapping -def test_mapping_can_has_fields(): +def test_mapping_can_has_fields() -> None: m = mapping.Mapping() m.field("name", "text").field("tags", "keyword") @@ -73,7 +73,7 @@ def test_mapping_update_is_recursive(): } == m1.to_dict() -def test_properties_can_iterate_over_all_the_fields(): +def test_properties_can_iterate_over_all_the_fields() -> None: m = mapping.Mapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -202,7 +202,7 @@ def test_mapping_can_collect_multiple_analyzers(): } == m._collect_analysis() -def test_even_non_custom_analyzers_can_have_params(): +def test_even_non_custom_analyzers_can_have_params() -> None: a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.Mapping() m.field("title", "text", analyzer=a1) @@ -212,14 +212,14 @@ def test_even_non_custom_analyzers_can_have_params(): } == m._collect_analysis() -def test_resolve_field_can_resolve_multifields(): +def test_resolve_field_can_resolve_multifields() -> None: m = mapping.Mapping() m.field("title", "text", 
fields={"keyword": Keyword()}) assert isinstance(m.resolve_field("title.keyword"), Keyword) -def test_resolve_nested(): +def test_resolve_nested() -> None: m = mapping.Mapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 95acfbe5..142b865c 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -30,61 +30,61 @@ from opensearchpy.helpers import function, query -def test_empty_Q_is_match_all(): +def test_empty_Q_is_match_all() -> None: q = query.Q() assert isinstance(q, query.MatchAll) assert query.MatchAll() == q -def test_match_to_dict(): +def test_match_to_dict() -> None: assert {"match": {"f": "value"}} == query.Match(f="value").to_dict() -def test_match_to_dict_extra(): +def test_match_to_dict_extra() -> None: assert {"match": {"f": "value", "boost": 2}} == query.Match( f="value", boost=2 ).to_dict() -def test_fuzzy_to_dict(): +def test_fuzzy_to_dict() -> None: assert {"fuzzy": {"f": "value"}} == query.Fuzzy(f="value").to_dict() -def test_prefix_to_dict(): +def test_prefix_to_dict() -> None: assert {"prefix": {"f": "value"}} == query.Prefix(f="value").to_dict() -def test_term_to_dict(): +def test_term_to_dict() -> None: assert {"term": {"_type": "article"}} == query.Term(_type="article").to_dict() -def test_bool_to_dict(): +def test_bool_to_dict() -> None: bool = query.Bool(must=[query.Match(f="value")], should=[]) assert {"bool": {"must": [{"match": {"f": "value"}}]}} == bool.to_dict() -def test_dismax_to_dict(): +def test_dismax_to_dict() -> None: assert {"dis_max": {"queries": [{"term": {"_type": "article"}}]}} == query.DisMax( queries=[query.Term(_type="article")] ).to_dict() -def test_bool_from_dict_issue_318(): +def test_bool_from_dict_issue_318() -> None: d = {"bool": {"must_not": {"match": {"field": "value"}}}} q = query.Q(d) assert q == ~query.Match(field="value") -def test_repr(): +def test_repr() -> None: bool = query.Bool(must=[query.Match(f="value")], should=[]) assert "Bool(must=[Match(f='value')])" == repr(bool) -def test_query_clone(): +def test_query_clone() -> None: bool = query.Bool( must=[query.Match(x=42)], should=[query.Match(g="v2")], @@ -96,14 +96,14 @@ def test_query_clone(): assert bool is not bool_clone -def test_bool_converts_its_init_args_to_queries(): +def test_bool_converts_its_init_args_to_queries() -> None: q = query.Bool(must=[{"match": {"f": "value"}}]) assert len(q.must) == 1 assert q.must[0] == query.Match(f="value") -def test_two_queries_make_a_bool(): +def test_two_queries_make_a_bool() -> None: q1 = query.Match(f="value1") q2 = query.Match(message={"query": "this is a test", "opeartor": "and"}) q = q1 & q2 @@ -112,7 +112,7 @@ def test_two_queries_make_a_bool(): assert [q1, q2] == q.must -def test_other_and_bool_appends_other_to_must(): +def test_other_and_bool_appends_other_to_must() -> None: q1 = query.Match(f="value1") qb = query.Bool() @@ -121,7 +121,7 @@ def test_other_and_bool_appends_other_to_must(): assert q.must[0] == q1 -def test_bool_and_other_appends_other_to_must(): +def test_bool_and_other_appends_other_to_must() -> None: q1 = query.Match(f="value1") qb = query.Bool() @@ -130,7 +130,7 @@ def test_bool_and_other_appends_other_to_must(): assert q.must[0] == q1 -def test_bool_and_other_sets_min_should_match_if_needed(): +def test_bool_and_other_sets_min_should_match_if_needed() -> None: q1 = query.Q("term", 
category=1) q2 = query.Q( "bool", should=[query.Q("term", name="aaa"), query.Q("term", name="bbb")] @@ -144,7 +144,7 @@ def test_bool_and_other_sets_min_should_match_if_needed(): ) -def test_bool_with_different_minimum_should_match_should_not_be_combined(): +def test_bool_with_different_minimum_should_match_should_not_be_combined() -> None: q1 = query.Q( "bool", minimum_should_match=2, @@ -183,11 +183,11 @@ def test_bool_with_different_minimum_should_match_should_not_be_combined(): assert q5 == query.Bool(should=[q1, q2, q3]) -def test_empty_bool_has_min_should_match_0(): +def test_empty_bool_has_min_should_match_0() -> None: assert 0 == query.Bool()._min_should_match -def test_query_and_query_creates_bool(): +def test_query_and_query_creates_bool() -> None: q1 = query.Match(f=42) q2 = query.Match(g=47) @@ -196,7 +196,7 @@ def test_query_and_query_creates_bool(): assert q.must == [q1, q2] -def test_match_all_and_query_equals_other(): +def test_match_all_and_query_equals_other() -> None: q1 = query.Match(f=42) q2 = query.MatchAll() @@ -204,39 +204,39 @@ def test_match_all_and_query_equals_other(): assert q1 == q -def test_not_match_all_is_match_none(): +def test_not_match_all_is_match_none() -> None: q = query.MatchAll() assert ~q == query.MatchNone() -def test_not_match_none_is_match_all(): +def test_not_match_none_is_match_all() -> None: q = query.MatchNone() assert ~q == query.MatchAll() -def test_invert_empty_bool_is_match_none(): +def test_invert_empty_bool_is_match_none() -> None: q = query.Bool() assert ~q == query.MatchNone() -def test_match_none_or_query_equals_query(): +def test_match_none_or_query_equals_query() -> None: q1 = query.Match(f=42) q2 = query.MatchNone() assert q1 | q2 == query.Match(f=42) -def test_match_none_and_query_equals_match_none(): +def test_match_none_and_query_equals_match_none() -> None: q1 = query.Match(f=42) q2 = query.MatchNone() assert q1 & q2 == query.MatchNone() -def test_bool_and_bool(): +def test_bool_and_bool() -> None: qt1, qt2, qt3 = query.Match(f=1), query.Match(f=2), query.Match(f=3) q1 = query.Bool(must=[qt1], should=[qt2]) @@ -252,7 +252,7 @@ def test_bool_and_bool(): ) -def test_bool_and_bool_with_min_should_match(): +def test_bool_and_bool_with_min_should_match() -> None: qt1, qt2 = query.Match(f=1), query.Match(f=2) q1 = query.Q("bool", minimum_should_match=1, should=[qt1]) q2 = query.Q("bool", minimum_should_match=1, should=[qt2]) @@ -260,19 +260,19 @@ def test_bool_and_bool_with_min_should_match(): assert query.Q("bool", must=[qt1, qt2]) == q1 & q2 -def test_inverted_query_becomes_bool_with_must_not(): +def test_inverted_query_becomes_bool_with_must_not() -> None: q = query.Match(f=42) assert ~q == query.Bool(must_not=[query.Match(f=42)]) -def test_inverted_query_with_must_not_become_should(): +def test_inverted_query_with_must_not_become_should() -> None: q = query.Q("bool", must_not=[query.Q("match", f=1), query.Q("match", f=2)]) assert ~q == query.Q("bool", should=[query.Q("match", f=1), query.Q("match", f=2)]) -def test_inverted_query_with_must_and_must_not(): +def test_inverted_query_with_must_and_must_not() -> None: q = query.Q( "bool", must=[query.Q("match", f=3), query.Q("match", f=4)], @@ -292,13 +292,13 @@ def test_inverted_query_with_must_and_must_not(): ) -def test_double_invert_returns_original_query(): +def test_double_invert_returns_original_query() -> None: q = query.Match(f=42) assert q == ~~q -def test_bool_query_gets_inverted_internally(): +def test_bool_query_gets_inverted_internally() -> None: q = 
query.Bool(must_not=[query.Match(f=42)], must=[query.Match(g="v")]) assert ~q == query.Bool( @@ -311,7 +311,7 @@ def test_bool_query_gets_inverted_internally(): ) -def test_match_all_or_something_is_match_all(): +def test_match_all_or_something_is_match_all() -> None: q1 = query.MatchAll() q2 = query.Match(f=42) @@ -319,7 +319,7 @@ def test_match_all_or_something_is_match_all(): assert (q2 | q1) == query.MatchAll() -def test_or_produces_bool_with_should(): +def test_or_produces_bool_with_should() -> None: q1 = query.Match(f=42) q2 = query.Match(g="v") @@ -327,7 +327,7 @@ def test_or_produces_bool_with_should(): assert q == query.Bool(should=[q1, q2]) -def test_or_bool_doesnt_loop_infinitely_issue_37(): +def test_or_bool_doesnt_loop_infinitely_issue_37() -> None: q = query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -335,7 +335,7 @@ def test_or_bool_doesnt_loop_infinitely_issue_37(): ) -def test_or_bool_doesnt_loop_infinitely_issue_96(): +def test_or_bool_doesnt_loop_infinitely_issue_96() -> None: q = ~query.Match(f=42) | ~query.Match(f=47) assert q == query.Bool( @@ -346,14 +346,14 @@ def test_or_bool_doesnt_loop_infinitely_issue_96(): ) -def test_bool_will_append_another_query_with_or(): +def test_bool_will_append_another_query_with_or() -> None: qb = query.Bool(should=[query.Match(f="v"), query.Match(f="v2")]) q = query.Match(g=42) assert (q | qb) == query.Bool(should=[query.Match(f="v"), query.Match(f="v2"), q]) -def test_bool_queries_with_only_should_get_concatenated(): +def test_bool_queries_with_only_should_get_concatenated() -> None: q1 = query.Bool(should=[query.Match(f=1), query.Match(f=2)]) q2 = query.Bool(should=[query.Match(f=3), query.Match(f=4)]) @@ -362,7 +362,7 @@ def test_bool_queries_with_only_should_get_concatenated(): ) -def test_two_bool_queries_append_one_to_should_if_possible(): +def test_two_bool_queries_append_one_to_should_if_possible() -> None: q1 = query.Bool(should=[query.Match(f="v")]) q2 = query.Bool(must=[query.Match(f="v")]) @@ -374,12 +374,12 @@ def test_two_bool_queries_append_one_to_should_if_possible(): ) -def test_queries_are_registered(): +def test_queries_are_registered() -> None: assert "match" in query.Query._classes assert query.Query._classes["match"] is query.Match -def test_defining_query_registers_it(): +def test_defining_query_registers_it() -> None: class MyQuery(query.Query): name = "my_query" @@ -387,62 +387,62 @@ class MyQuery(query.Query): assert query.Query._classes["my_query"] is MyQuery -def test_Q_passes_query_through(): +def test_Q_passes_query_through() -> None: q = query.Match(f="value1") assert query.Q(q) is q -def test_Q_constructs_query_by_name(): +def test_Q_constructs_query_by_name() -> None: q = query.Q("match", f="value") assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def test_Q_translates_double_underscore_to_dots_in_param_names(): +def test_Q_translates_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza") assert {"comment.author": "honza"} == q._params -def test_Q_doesn_translate_double_underscore_to_dots_in_param_names(): +def test_Q_doesn_translate_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza", _expand__to_dot=False) assert {"comment__author": "honza"} == q._params -def test_Q_constructs_simple_query_from_dict(): +def test_Q_constructs_simple_query_from_dict() -> None: q = query.Q({"match": {"f": "value"}}) assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def 
test_Q_constructs_compound_query_from_dict(): +def test_Q_constructs_compound_query_from_dict() -> None: q = query.Q({"bool": {"must": [{"match": {"f": "value"}}]}}) assert q == query.Bool(must=[query.Match(f="value")]) -def test_Q_raises_error_when_passed_in_dict_and_params(): +def test_Q_raises_error_when_passed_in_dict_and_params() -> None: with raises(Exception): query.Q({"match": {"f": "value"}}, f="value") -def test_Q_raises_error_when_passed_in_query_and_params(): +def test_Q_raises_error_when_passed_in_query_and_params() -> None: q = query.Match(f="value1") with raises(Exception): query.Q(q, f="value") -def test_Q_raises_error_on_unknown_query(): +def test_Q_raises_error_on_unknown_query() -> None: with raises(Exception): query.Q("not a query", f="value") -def test_match_all_and_anything_is_anything(): +def test_match_all_and_anything_is_anything() -> None: q = query.MatchAll() s = query.Match(f=42) @@ -450,7 +450,7 @@ def test_match_all_and_anything_is_anything(): assert s & q == s -def test_function_score_with_functions(): +def test_function_score_with_functions() -> None: q = query.Q( "function_score", functions=[query.SF("script_score", script="doc['comment_count'] * _score")], @@ -550,7 +550,7 @@ def test_function_score_from_dict(): assert {"boost_factor": 6} == sf.to_dict() -def test_script_score(): +def test_script_score() -> None: d = { "script_score": { "query": {"match_all": {}}, diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index 83fe8a08..657beb05 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -41,7 +41,7 @@ def agg_response(aggs_search, aggs_data): return response.Response(aggs_search, aggs_data) -def test_agg_response_is_pickleable(agg_response): +def test_agg_response_is_pickleable(agg_response) -> None: agg_response.hits r = pickle.loads(pickle.dumps(agg_response)) @@ -50,7 +50,7 @@ def test_agg_response_is_pickleable(agg_response): assert r.hits == agg_response.hits -def test_response_is_pickleable(dummy_response): +def test_response_is_pickleable(dummy_response) -> None: res = response.Response(Search(), dummy_response) res.hits r = pickle.loads(pickle.dumps(res)) @@ -60,7 +60,7 @@ def test_response_is_pickleable(dummy_response): assert r.hits == res.hits -def test_hit_is_pickleable(dummy_response): +def test_hit_is_pickleable(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = pickle.loads(pickle.dumps(res.hits)) @@ -68,14 +68,14 @@ def test_hit_is_pickleable(dummy_response): assert hits[0].meta == res.hits[0].meta -def test_response_stores_search(dummy_response): +def test_response_stores_search(dummy_response) -> None: s = Search() r = response.Response(s, dummy_response) assert r._search is s -def test_interactive_helpers(dummy_response): +def test_interactive_helpers(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = res.hits h = hits[0] @@ -98,19 +98,19 @@ def test_interactive_helpers(dummy_response): ] == repr(h) -def test_empty_response_is_false(dummy_response): +def test_empty_response_is_false(dummy_response) -> None: dummy_response["hits"]["hits"] = [] res = response.Response(Search(), dummy_response) assert not res -def test_len_response(dummy_response): +def test_len_response(dummy_response) -> None: res = response.Response(Search(), dummy_response) assert len(res) == 4 -def test_iterating_over_response_gives_you_hits(dummy_response): +def 
test_iterating_over_response_gives_you_hits(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = list(h for h in res) @@ -127,7 +127,7 @@ def test_iterating_over_response_gives_you_hits(dummy_response): assert hits[1].meta.routing == "opensearch" -def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response): +def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response) -> None: res = response.Response(Search(), dummy_response) hits = res.hits @@ -135,7 +135,7 @@ def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response): assert 12.0 == hits.max_score -def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response): +def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response) -> None: res = response.Response(Search(), dummy_response) h = res.hits[0] @@ -151,30 +151,30 @@ def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response): h.not_there -def test_slicing_on_response_slices_on_hits(dummy_response): +def test_slicing_on_response_slices_on_hits(dummy_response) -> None: res = response.Response(Search(), dummy_response) assert res[0] is res.hits[0] assert res[::-1] == res.hits[::-1] -def test_aggregation_base(agg_response): +def test_aggregation_base(agg_response) -> None: assert agg_response.aggs is agg_response.aggregations assert isinstance(agg_response.aggs, response.AggResponse) -def test_metric_agg_works(agg_response): +def test_metric_agg_works(agg_response) -> None: assert 25052.0 == agg_response.aggs.sum_lines.value -def test_aggregations_can_be_iterated_over(agg_response): +def test_aggregations_can_be_iterated_over(agg_response) -> None: aggs = [a for a in agg_response.aggs] assert len(aggs) == 3 assert all(map(lambda a: isinstance(a, AggResponse), aggs)) -def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search): +def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search) -> None: a = agg_response.aggs["popular_files"] assert isinstance(a, BucketData) @@ -182,7 +182,7 @@ def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search): assert a._meta["aggs"] is aggs_search.aggs.aggs["popular_files"] -def test_bucket_response_can_be_iterated_over(agg_response): +def test_bucket_response_can_be_iterated_over(agg_response) -> None: popular_files = agg_response.aggregations.popular_files buckets = [b for b in popular_files] @@ -190,7 +190,7 @@ def test_bucket_response_can_be_iterated_over(agg_response): assert buckets == popular_files.buckets -def test_bucket_keys_get_deserialized(aggs_data, aggs_search): +def test_bucket_keys_get_deserialized(aggs_data, aggs_search) -> None: class Commit(Document): info = Object(properties={"committed_date": Date()}) diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py index dae61a00..73d078a9 100644 --- a/test_opensearchpy/test_helpers/test_search.py +++ b/test_opensearchpy/test_helpers/test_search.py @@ -34,13 +34,13 @@ from opensearchpy.helpers import query, search -def test_expand__to_dot_is_respected(): +def test_expand__to_dot_is_respected() -> None: s = search.Search().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() -def test_execute_uses_cache(): +def test_execute_uses_cache() -> None: s = search.Search() r = object() s._response = r @@ -48,7 +48,7 @@ def test_execute_uses_cache(): assert r is s.execute() -def test_cache_can_be_ignored(mock_client): +def test_cache_can_be_ignored(mock_client) -> None: s = 
search.Search(using="mock") r = object() s._response = r @@ -57,27 +57,27 @@ def test_cache_can_be_ignored(mock_client): mock_client.search.assert_called_once_with(index=None, body={}) -def test_iter_iterates_over_hits(): +def test_iter_iterates_over_hits() -> None: s = search.Search() s._response = [1, 2, 3] assert [1, 2, 3] == list(s) -def test_cache_isnt_cloned(): +def test_cache_isnt_cloned() -> None: s = search.Search() s._response = object() assert not hasattr(s._clone(), "_response") -def test_search_starts_with_no_query(): +def test_search_starts_with_no_query() -> None: s = search.Search() assert s.query._proxied is None -def test_search_query_combines_query(): +def test_search_query_combines_query() -> None: s = search.Search() s2 = s.query("match", f=42) @@ -89,7 +89,7 @@ def test_search_query_combines_query(): assert s3.query._proxied == query.Bool(must=[query.Match(f=42), query.Match(f=43)]) -def test_query_can_be_assigned_to(): +def test_query_can_be_assigned_to() -> None: s = search.Search() q = Q("match", title="python") @@ -113,7 +113,7 @@ def test_query_can_be_wrapped(): } == s.to_dict() -def test_using(): +def test_using() -> None: o = object() o2 = object() s = search.Search(using=o) @@ -123,19 +123,19 @@ def test_using(): assert s2._using is o2 -def test_methods_are_proxied_to_the_query(): +def test_methods_are_proxied_to_the_query() -> None: s = search.Search().query("match_all") assert s.query.to_dict() == {"match_all": {}} -def test_query_always_returns_search(): +def test_query_always_returns_search() -> None: s = search.Search() assert isinstance(s.query("match", f=42), search.Search) -def test_source_copied_on_clone(): +def test_source_copied_on_clone() -> None: s = search.Search().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -149,7 +149,7 @@ def test_source_copied_on_clone(): assert s3._clone()._source == ["some", "fields"] -def test_copy_clones(): +def test_copy_clones() -> None: from copy import copy s1 = search.Search().source(["some", "fields"]) @@ -159,7 +159,7 @@ def test_copy_clones(): assert s1 is not s2 -def test_aggs_allow_two_metric(): +def test_aggs_allow_two_metric() -> None: s = search.Search() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -201,7 +201,7 @@ def test_aggs_get_copied_on_change(): assert d == s4.to_dict() -def test_search_index(): +def test_search_index() -> None: s = search.Search(index="i") assert s._index == ["i"] s = s.index("i2") @@ -232,7 +232,7 @@ def test_search_index(): assert s2._index == ["i", "i2", "i3", "i4", "i5"] -def test_doc_type_document_class(): +def test_doc_type_document_class() -> None: class MyDocument(Document): pass @@ -257,7 +257,7 @@ def test_sort(): assert search.Search().to_dict() == s.to_dict() -def test_sort_by_score(): +def test_sort_by_score() -> None: s = search.Search() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -301,7 +301,7 @@ def test_collapse(): assert search.Search().to_dict() == s.to_dict() -def test_slice(): +def test_slice() -> None: s = search.Search() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -310,7 +310,7 @@ def test_slice(): assert {"from": 20, "size": 0} == s[20:0].to_dict() -def test_index(): +def test_index() -> None: s = search.Search() assert {"from": 3, "size": 1} == s[3].to_dict() @@ -445,13 +445,13 @@ def test_reverse(): assert d == s.to_dict() -def test_from_dict_doesnt_need_query(): +def test_from_dict_doesnt_need_query() -> 
None: s = search.Search.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() -def test_params_being_passed_to_search(mock_client): +def test_params_being_passed_to_search(mock_client) -> None: s = search.Search(using="mock") s = s.params(routing="42") s.execute() @@ -459,7 +459,7 @@ def test_params_being_passed_to_search(mock_client): mock_client.search.assert_called_once_with(index=None, body={}, routing="42") -def test_source(): +def test_source() -> None: assert {} == search.Search().source().to_dict() assert { @@ -488,7 +488,7 @@ def test_source_on_clone(): } == search.Search().source(False).filter("term", title="python").to_dict() -def test_source_on_clear(): +def test_source_on_clear() -> None: assert ( {} == search.Search() @@ -529,7 +529,7 @@ def test_suggest(): } == s.to_dict() -def test_exclude(): +def test_exclude() -> None: s = search.Search() s = s.exclude("match", title="python") @@ -542,7 +542,7 @@ def test_exclude(): } == s.to_dict() -def test_delete_by_query(mock_client): +def test_delete_by_query(mock_client) -> None: s = search.Search(using="mock").query("match", lang="java") s.delete() diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py index 336f8fda..74030874 100644 --- a/test_opensearchpy/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_helpers/test_update_by_query.py @@ -31,7 +31,7 @@ from opensearchpy.helpers.response import UpdateByQueryResponse -def test_ubq_starts_with_no_query(): +def test_ubq_starts_with_no_query() -> None: ubq = UpdateByQuery() assert ubq.query._proxied is None @@ -91,7 +91,7 @@ def test_complex_example(): } == ubq.to_dict() -def test_exclude(): +def test_exclude() -> None: ubq = UpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -140,13 +140,13 @@ def test_reverse(): assert d == ubq.to_dict() -def test_from_dict_doesnt_need_query(): +def test_from_dict_doesnt_need_query() -> None: ubq = UpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() -def test_params_being_passed_to_search(mock_client): +def test_params_being_passed_to_search(mock_client) -> None: ubq = UpdateByQuery(using="mock") ubq = ubq.params(routing="42") ubq.execute() @@ -172,7 +172,7 @@ def test_overwrite_script(): assert {"script": {"source": "ctx._source.likes++"}} == ubq.to_dict() -def test_update_by_query_response_success(): +def test_update_by_query_response_success() -> None: ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index c651fe2f..358b9184 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -33,21 +33,21 @@ from opensearchpy.helpers import utils -def test_attrdict_pickle(): +def test_attrdict_pickle() -> None: ad = utils.AttrDict({}) pickled_ad = pickle.dumps(ad) assert ad == pickle.loads(pickled_ad) -def test_attrlist_pickle(): +def test_attrlist_pickle() -> None: al = utils.AttrList([]) pickled_al = pickle.dumps(al) assert al == pickle.loads(pickled_al) -def test_attrlist_slice(): +def test_attrlist_slice() -> None: class MyAttrDict(utils.AttrDict): pass @@ -64,7 +64,7 @@ def test_merge(): assert a == {"a": {"b": 123, "c": 47, "d": -12}, "e": [1, 2, 3]} -def test_merge_conflict(): +def test_merge_conflict() -> None: for d in ( {"a": 42}, {"a": {"b": 47}}, @@ -74,7 +74,7 @@ def 
test_merge_conflict(): utils.merge({"a": {"b": 42}}, d, True) -def test_attrdict_bool(): +def test_attrdict_bool() -> None: d = utils.AttrDict({}) assert not d @@ -82,7 +82,7 @@ def test_attrdict_bool(): assert d -def test_attrlist_items_get_wrapped_during_iteration(): +def test_attrlist_items_get_wrapped_during_iteration() -> None: al = utils.AttrList([1, object(), [1], {}]) ls = list(iter(al)) @@ -91,7 +91,7 @@ def test_attrlist_items_get_wrapped_during_iteration(): assert isinstance(ls[3], utils.AttrDict) -def test_serializer_deals_with_Attr_versions(): +def test_serializer_deals_with_Attr_versions() -> None: d = utils.AttrDict({"key": utils.AttrList([1, 2, 3])}) assert serializer.serializer.dumps(d) == serializer.serializer.dumps( @@ -99,7 +99,7 @@ def test_serializer_deals_with_Attr_versions(): ) -def test_serializer_deals_with_objects_with_to_dict(): +def test_serializer_deals_with_objects_with_to_dict() -> None: class MyClass(object): def to_dict(self): return 42 @@ -107,13 +107,13 @@ def to_dict(self): assert serializer.serializer.dumps(MyClass()) == "42" -def test_recursive_to_dict(): +def test_recursive_to_dict() -> None: assert utils.recursive_to_dict({"k": [1, (1.0, {"v": Q("match", key="val")})]}) == { "k": [1, (1.0, {"v": {"match": {"key": "val"}}})] } -def test_attrdict_get(): +def test_attrdict_get() -> None: a = utils.AttrDict({"a": {"b": 42, "c": 47}}) assert a.get("a", {}).get("b", 0) == 42 assert a.get("a", {}).get("e", 0) == 0 diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index e8d9f5aa..1565b352 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -46,7 +46,7 @@ class Author(InnerDoc): name = Text(required=True) email = Text(required=True) - def clean(self): + def clean(self) -> None: print(self, type(self), self.name) if self.name.lower() not in self.email: raise ValidationException("Invalid email!") @@ -74,7 +74,7 @@ class Log(Document): data = Text() -def test_required_int_can_be_0(): +def test_required_int_can_be_0() -> None: class DT(Document): i = Integer(required=True) @@ -82,7 +82,7 @@ class DT(Document): assert dt.full_clean() is None -def test_required_field_cannot_be_empty_list(): +def test_required_field_cannot_be_empty_list() -> None: class DT(Document): i = Integer(required=True) @@ -91,7 +91,7 @@ class DT(Document): dt.full_clean() -def test_validation_works_for_lists_of_values(): +def test_validation_works_for_lists_of_values() -> None: class DT(Document): i = Date(required=True) @@ -103,21 +103,21 @@ class DT(Document): assert None is dt.full_clean() -def test_field_with_custom_clean(): +def test_field_with_custom_clean() -> None: ls = Log() ls.full_clean() assert isinstance(ls.timestamp, datetime) -def test_empty_object(): +def test_empty_object() -> None: d = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}]) d.inner = {} d.full_clean() -def test_missing_required_field_raises_validation_exception(): +def test_missing_required_field_raises_validation_exception() -> None: d = BlogPost() with raises(ValidationException): d.full_clean() @@ -132,7 +132,7 @@ def test_missing_required_field_raises_validation_exception(): d.full_clean() -def test_boolean_doesnt_treat_false_as_empty(): +def test_boolean_doesnt_treat_false_as_empty() -> None: d = BlogPostWithStatus() with raises(ValidationException): d.full_clean() @@ -142,7 +142,7 @@ def test_boolean_doesnt_treat_false_as_empty(): d.full_clean() -def 
test_custom_validation_on_nested_gets_run(): +def test_custom_validation_on_nested_gets_run() -> None: d = BlogPost(authors=[Author(name="Guian", email="king@example.com")], created=None) assert isinstance(d.authors[0], Author) @@ -151,7 +151,7 @@ def test_custom_validation_on_nested_gets_run(): d.full_clean() -def test_accessing_known_fields_returns_empty_value(): +def test_accessing_known_fields_returns_empty_value() -> None: d = BlogPost() assert [] == d.authors @@ -161,7 +161,7 @@ def test_accessing_known_fields_returns_empty_value(): assert None is d.authors[0].email -def test_empty_values_are_not_serialized(): +def test_empty_values_are_not_serialized() -> None: d = BlogPost( authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}], created=None ) diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py index c49353c5..2212b070 100644 --- a/test_opensearchpy/test_helpers/test_wrappers.py +++ b/test_opensearchpy/test_helpers/test_wrappers.py @@ -44,7 +44,7 @@ ({"gt": datetime.now() - timedelta(seconds=10)}, datetime.now()), ], ) -def test_range_contains(kwargs, item): +def test_range_contains(kwargs, item) -> None: assert item in Range(**kwargs) @@ -72,7 +72,7 @@ def test_range_not_contains(kwargs, item): ((), {"gt": 1, "gte": 1}), ], ) -def test_range_raises_value_error_on_wrong_params(args, kwargs): +def test_range_raises_value_error_on_wrong_params(args, kwargs) -> None: with pytest.raises(ValueError): Range(*args, **kwargs) @@ -86,7 +86,7 @@ def test_range_raises_value_error_on_wrong_params(args, kwargs): (Range(lt=42), None, False), ], ) -def test_range_lower(range, lower, inclusive): +def test_range_lower(range, lower, inclusive) -> None: assert (lower, inclusive) == range.lower @@ -99,5 +99,5 @@ def test_range_lower(range, lower, inclusive): (Range(gt=42), None, False), ], ) -def test_range_upper(range, upper, inclusive): +def test_range_upper(range, upper, inclusive) -> None: assert (upper, inclusive) == range.upper diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index b324b53c..d7fef3e8 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -48,26 +48,26 @@ from .test_cases import SkipTest, TestCase -def requires_numpy_and_pandas(): +def requires_numpy_and_pandas() -> None: if np is None or pd is None: raise SkipTest("Test requires numpy or pandas to be available") class TestJSONSerializer(TestCase): - def test_datetime_serialization(self): + def test_datetime_serialization(self) -> None: self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": datetime(2010, 10, 1, 2, 30)}), ) - def test_decimal_serialization(self): + def test_decimal_serialization(self) -> None: requires_numpy_and_pandas() if sys.version_info[:2] == (2, 6): raise SkipTest("Float rounding is broken in 2.6.") self.assertEqual('{"d":3.8}', JSONSerializer().dumps({"d": Decimal("3.8")})) - def test_uuid_serialization(self): + def test_uuid_serialization(self) -> None: self.assertEqual( '{"d":"00000000-0000-0000-0000-000000000003"}', JSONSerializer().dumps( @@ -75,12 +75,12 @@ def test_uuid_serialization(self): ), ) - def test_serializes_numpy_bool(self): + def test_serializes_numpy_bool(self) -> None: requires_numpy_and_pandas() self.assertEqual('{"d":true}', JSONSerializer().dumps({"d": np.bool_(True)})) - def test_serializes_numpy_integers(self): + def test_serializes_numpy_integers(self) -> None: requires_numpy_and_pandas() ser = JSONSerializer() 
@@ -101,7 +101,7 @@ def test_serializes_numpy_integers(self): ): self.assertEqual(ser.dumps({"d": np_type(1)}), '{"d":1}') - def test_serializes_numpy_floats(self): + def test_serializes_numpy_floats(self) -> None: requires_numpy_and_pandas() ser = JSONSerializer() @@ -114,7 +114,7 @@ def test_serializes_numpy_floats(self): ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$' ) - def test_serializes_numpy_datetime(self): + def test_serializes_numpy_datetime(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -122,7 +122,7 @@ def test_serializes_numpy_datetime(self): JSONSerializer().dumps({"d": np.datetime64("2010-10-01T02:30:00")}), ) - def test_serializes_numpy_ndarray(self): + def test_serializes_numpy_ndarray(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -135,7 +135,7 @@ def test_serializes_numpy_ndarray(self): JSONSerializer().dumps({"d": np.zeros((2, 2), dtype=np.uint8)}), ) - def test_serializes_numpy_nan_to_nan(self): + def test_serializes_numpy_nan_to_nan(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -143,7 +143,7 @@ def test_serializes_numpy_nan_to_nan(self): JSONSerializer().dumps({"d": np.nan}), ) - def test_serializes_pandas_timestamp(self): + def test_serializes_pandas_timestamp(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -151,7 +151,7 @@ def test_serializes_pandas_timestamp(self): JSONSerializer().dumps({"d": pd.Timestamp("2010-10-01T02:30:00")}), ) - def test_serializes_pandas_series(self): + def test_serializes_pandas_series(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -159,7 +159,7 @@ def test_serializes_pandas_series(self): JSONSerializer().dumps({"d": pd.Series(["a", "b", "c", "d"])}), ) - def test_serializes_pandas_na(self): + def test_serializes_pandas_na(self) -> None: requires_numpy_and_pandas() if not hasattr(pd, "NA"): # pandas.NA added in v1 @@ -169,14 +169,14 @@ def test_serializes_pandas_na(self): JSONSerializer().dumps({"d": pd.NA}), ) - def test_raises_serialization_error_pandas_nat(self): + def test_raises_serialization_error_pandas_nat(self) -> None: requires_numpy_and_pandas() if not hasattr(pd, "NaT"): raise SkipTest("pandas.NaT required") self.assertRaises(SerializationError, JSONSerializer().dumps, {"d": pd.NaT}) - def test_serializes_pandas_category(self): + def test_serializes_pandas_category(self) -> None: requires_numpy_and_pandas() cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"]) @@ -191,34 +191,34 @@ def test_serializes_pandas_category(self): JSONSerializer().dumps({"d": cat}), ) - def test_raises_serialization_error_on_dump_error(self): + def test_raises_serialization_error_on_dump_error(self) -> None: self.assertRaises(SerializationError, JSONSerializer().dumps, object()) - def test_raises_serialization_error_on_load_error(self): + def test_raises_serialization_error_on_load_error(self) -> None: self.assertRaises(SerializationError, JSONSerializer().loads, object()) self.assertRaises(SerializationError, JSONSerializer().loads, "") self.assertRaises(SerializationError, JSONSerializer().loads, "{{") - def test_strings_are_left_untouched(self): + def test_strings_are_left_untouched(self) -> None: self.assertEqual("你好", JSONSerializer().dumps("你好")) class TestTextSerializer(TestCase): - def test_strings_are_left_untouched(self): + def test_strings_are_left_untouched(self) -> None: self.assertEqual("你好", TextSerializer().dumps("你好")) - def test_raises_serialization_error_on_dump_error(self): + def 
test_raises_serialization_error_on_dump_error(self) -> None:
         self.assertRaises(SerializationError, TextSerializer().dumps, {})


 class TestDeserializer(TestCase):
-    def setup_method(self, _):
+    def setup_method(self, _) -> None:
         self.de = Deserializer(DEFAULT_SERIALIZERS)

-    def test_deserializes_json_by_default(self):
+    def test_deserializes_json_by_default(self) -> None:
         self.assertEqual({"some": "data"}, self.de.loads('{"some":"data"}'))

-    def test_deserializes_text_with_correct_ct(self):
+    def test_deserializes_text_with_correct_ct(self) -> None:
         self.assertEqual(
             '{"some":"data"}', self.de.loads('{"some":"data"}', "text/plain")
         )
@@ -227,10 +227,10 @@ def test_deserializes_text_with_correct_ct(self):
             self.de.loads('{"some":"data"}', "text/plain; charset=whatever"),
         )

-    def test_raises_serialization_error_on_unknown_mimetype(self):
+    def test_raises_serialization_error_on_unknown_mimetype(self) -> None:
         self.assertRaises(SerializationError, self.de.loads, "{}", "text/html")

     def test_raises_improperly_configured_when_default_mimetype_cannot_be_deserialized(
         self,
-    ):
+    ) -> None:
         self.assertRaises(ImproperlyConfigured, Deserializer, {})
diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py
index 164e6a5d..d3965fed 100644
--- a/test_opensearchpy/test_server/__init__.py
+++ b/test_opensearchpy/test_server/__init__.py
@@ -60,7 +60,7 @@ def get_client(**kwargs):
     return new_client


-def setup_module():
+def setup_module() -> None:
     get_client()
diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py
index 2d5c4155..32550a03 100644
--- a/test_opensearchpy/test_server/test_clients.py
+++ b/test_opensearchpy/test_server/test_clients.py
@@ -32,19 +32,19 @@


 class TestUnicode(OpenSearchTestCase):
-    def test_indices_analyze(self):
+    def test_indices_analyze(self) -> None:
         self.client.indices.analyze(body='{"text": "привет"}')


 class TestBulk(OpenSearchTestCase):
-    def test_bulk_works_with_string_body(self):
+    def test_bulk_works_with_string_body(self) -> None:
         docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}'
         response = self.client.bulk(body=docs)

         self.assertFalse(response["errors"])
         self.assertEqual(1, len(response["items"]))

-    def test_bulk_works_with_bytestring_body(self):
+    def test_bulk_works_with_bytestring_body(self) -> None:
         docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}'
         response = self.client.bulk(body=docs)
diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py
index 4e167d34..8be79616 100644
--- a/test_opensearchpy/test_server/test_helpers/conftest.py
+++ b/test_opensearchpy/test_server/test_helpers/conftest.py
@@ -30,6 +30,7 @@

 from pytest import fixture

+from opensearchpy.client import OpenSearch
 from opensearchpy.connection.connections import add_connection
 from opensearchpy.helpers import bulk
 from opensearchpy.helpers.test import get_test_client
@@ -45,7 +46,7 @@


 @fixture(scope="session")
-def client():
+def client() -> OpenSearch:
     client = get_test_client(verify_certs=False, http_auth=("admin", "admin"))
     add_connection("default", client)
     return client
@@ -106,7 +107,7 @@ def pull_request(write_client):


 @fixture
-def setup_ubq_tests(client):
+def setup_ubq_tests(client) -> str:
     index = "test-git"
     create_git_index(client, index)
     bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True)
diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py
index fcb65fde..7fb8f234 100644
--- a/test_opensearchpy/test_server/test_helpers/test_actions.py
+++ b/test_opensearchpy/test_server/test_helpers/test_actions.py
@@ -26,6 +26,8 @@
 # under the License.


+from typing import Tuple
+
 from mock import patch

 from opensearchpy import TransportError, helpers
@@ -37,8 +39,11 @@ class FailingBulkClient(object):
     def __init__(
-        self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {})
-    ):
+        self,
+        client,
+        fail_at: Tuple[int] = (2,),
+        fail_with=TransportError(599, "Error!", {}),
+    ) -> None:
         self.client = client
         self._called = 0
         self._fail_at = fail_at
@@ -53,7 +58,7 @@ def bulk(self, *args, **kwargs):


 class TestStreamingBulk(OpenSearchTestCase):
-    def test_actions_remain_unchanged(self):
+    def test_actions_remain_unchanged(self) -> None:
         actions = [{"_id": 1}, {"_id": 2}]
         for ok, item in helpers.streaming_bulk(
             self.client, actions, index="test-index"
@@ -61,7 +66,7 @@ def test_actions_remain_unchanged(self):
             self.assertTrue(ok)
         self.assertEqual([{"_id": 1}, {"_id": 2}], actions)

-    def test_all_documents_get_inserted(self):
+    def test_all_documents_get_inserted(self) -> None:
         docs = [{"answer": x, "_id": x} for x in range(100)]
         for ok, item in helpers.streaming_bulk(
             self.client, docs, index="test-index", refresh=True
@@ -73,7 +78,7 @@ def test_all_documents_get_inserted(self):
             {"answer": 42}, self.client.get(index="test-index", id=42)["_source"]
         )

-    def test_all_errors_from_chunk_are_raised_on_failure(self):
+    def test_all_errors_from_chunk_are_raised_on_failure(self) -> None:
         self.client.indices.create(
             "i",
             {
@@ -115,7 +120,7 @@ def test_different_op_types(self):
         self.assertEqual({"answer": 42}, self.client.get(index="i", id=42)["_source"])
         self.assertEqual({"f": "v"}, self.client.get(index="i", id=47)["_source"])

-    def test_transport_error_can_becaught(self):
+    def test_transport_error_can_becaught(self) -> None:
         failing_client = FailingBulkClient(self.client)
         docs = [
             {"_index": "i", "_id": 47, "f": "v"},
@@ -151,7 +156,7 @@ def test_transport_error_can_becaught(self):
             results[1][1],
         )

-    def test_rejected_documents_are_retried(self):
+    def test_rejected_documents_are_retried(self) -> None:
         failing_client = FailingBulkClient(
             self.client, fail_with=TransportError(429, "Rejected!", {})
         )
@@ -178,7 +183,7 @@ def test_rejected_documents_are_retried(self):
         self.assertEqual({"value": 3, "relation": "eq"}, res["hits"]["total"])
         self.assertEqual(4, failing_client._called)

-    def test_rejected_documents_are_retried_at_most_max_retries_times(self):
+    def test_rejected_documents_are_retried_at_most_max_retries_times(self) -> None:
         failing_client = FailingBulkClient(
             self.client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {})
         )
@@ -206,7 +211,7 @@
         self.assertEqual({"value": 2, "relation": "eq"}, res["hits"]["total"])
         self.assertEqual(4, failing_client._called)

-    def test_transport_error_is_raised_with_max_retries(self):
+    def test_transport_error_is_raised_with_max_retries(self) -> None:
         failing_client = FailingBulkClient(
             self.client,
             fail_at=(1, 2, 3, 4),
@@ -230,7 +235,7 @@ def streaming_bulk():


 class TestBulk(OpenSearchTestCase):
-    def test_bulk_works_with_single_item(self):
+    def test_bulk_works_with_single_item(self) -> None:
         docs = [{"answer": 42, "_id": 1}]
         success, failed = helpers.bulk(
             self.client, docs, index="test-index", refresh=True
@@ -243,7 +248,7 @@ def test_bulk_works_with_single_item(self):
             {"answer": 42}, self.client.get(index="test-index", id=1)["_source"]
         )

-    def test_all_documents_get_inserted(self):
+    def test_all_documents_get_inserted(self) -> None:
         docs = [{"answer": x, "_id": x} for x in range(100)]
         success, failed = helpers.bulk(
             self.client, docs, index="test-index", refresh=True
@@ -256,7 +261,7 @@ def test_all_documents_get_inserted(self):
             {"answer": 42}, self.client.get(index="test-index", id=42)["_source"]
         )

-    def test_stats_only_reports_numbers(self):
+    def test_stats_only_reports_numbers(self) -> None:
         docs = [{"answer": x} for x in range(100)]
         success, failed = helpers.bulk(
             self.client, docs, index="test-index", refresh=True, stats_only=True
@@ -293,7 +298,7 @@ def test_errors_are_reported_correctly(self):
             or "mapper_parsing_exception" in repr(error["index"]["error"])
         )

-    def test_error_is_raised(self):
+    def test_error_is_raised(self) -> None:
         self.client.indices.create(
             "i",
             {
@@ -379,7 +384,7 @@ class TestScan(OpenSearchTestCase):
         },
     ]

-    def teardown_method(self, m):
+    def teardown_method(self, m) -> None:
         self.client.transport.perform_request("DELETE", "/_search/scroll/_all")
         super(TestScan, self).teardown_method(m)
@@ -477,7 +482,7 @@ def test_initial_search_error(self):
                 self.assertEqual(data, [{"search_data": 1}])
                 client_mock.scroll.assert_not_called()

-    def test_no_scroll_id_fast_route(self):
+    def test_no_scroll_id_fast_route(self) -> None:
         with patch.object(self, "client") as client_mock:
             client_mock.search.return_value = {"no": "_scroll_id"}
             data = list(helpers.scan(self.client, index="test_index"))
@@ -654,7 +659,7 @@ def setup_method(self, _):
             )
         self.client.bulk(bulk, refresh=True)

-    def test_reindex_passes_kwargs_to_scan_and_bulk(self):
+    def test_reindex_passes_kwargs_to_scan_and_bulk(self) -> None:
         helpers.reindex(
             self.client,
             "test_index",
@@ -673,7 +678,7 @@ def test_reindex_passes_kwargs_to_scan_and_bulk(self):
             self.client.get(index="prod_index", id=42)["_source"],
         )

-    def test_reindex_accepts_a_query(self):
+    def test_reindex_accepts_a_query(self) -> None:
         helpers.reindex(
             self.client,
             "test_index",
@@ -692,7 +697,7 @@ def test_reindex_accepts_a_query(self):
             self.client.get(index="prod_index", id=42)["_source"],
         )

-    def test_all_documents_get_moved(self):
+    def test_all_documents_get_moved(self) -> None:
         helpers.reindex(self.client, "test_index", "prod_index")
         self.client.indices.refresh()
@@ -737,7 +742,7 @@ def setup_method(self, _):
         )
         self.client.indices.refresh(index="test-index")

-    def test_children_are_reindexed_correctly(self):
+    def test_children_are_reindexed_correctly(self) -> None:
         helpers.reindex(self.client, "test-index", "real-index")

         self.assertEqual(
diff --git a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py
index d0073c53..2da9388a 100644
--- a/test_opensearchpy/test_server/test_helpers/test_analysis.py
+++ b/test_opensearchpy/test_server/test_helpers/test_analysis.py
@@ -28,7 +28,7 @@
 from opensearchpy import analyzer, token_filter, tokenizer


-def test_simulate_with_just__builtin_tokenizer(client):
+def test_simulate_with_just__builtin_tokenizer(client) -> None:
     a = analyzer("my-analyzer", tokenizer="keyword")
     tokens = a.simulate("Hello World!", using=client).tokens
@@ -36,7 +36,7 @@ def test_simulate_with_just__builtin_tokenizer(client):
     assert tokens[0].token == "Hello World!"

-def test_simulate_complex(client):
+def test_simulate_complex(client) -> None:
     a = analyzer(
         "my-analyzer",
         tokenizer=tokenizer("split_words", "simple_pattern_split", pattern=":"),
@@ -49,7 +49,7 @@ def test_simulate_complex(client):
     assert ["this", "works"] == [t.token for t in tokens]


-def test_simulate_builtin(client):
+def test_simulate_builtin(client) -> None:
     a = analyzer("my-analyzer", "english")
     tokens = a.simulate("fixes running").tokens
diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py
index 6a507a9f..7bf9c27e 100644
--- a/test_opensearchpy/test_server/test_helpers/test_count.py
+++ b/test_opensearchpy/test_server/test_helpers/test_count.py
@@ -28,12 +28,12 @@
 from opensearchpy.helpers.search import Q, Search


-def test_count_all(data_client):
+def test_count_all(data_client) -> None:
     s = Search(using=data_client).index("git")
     assert 53 == s.count()


-def test_count_prefetch(data_client, mocker):
+def test_count_prefetch(data_client, mocker) -> None:
     mocker.spy(data_client, "count")

     search = Search(using=data_client).index("git")
@@ -46,7 +46,7 @@ def test_count_prefetch(data_client, mocker):
     assert data_client.count.call_count == 1


-def test_count_filter(data_client):
+def test_count_filter(data_client) -> None:
     s = Search(using=data_client).index("git").filter(~Q("exists", field="parent_shas"))
     # initial commit + repo document
     assert 2 == s.count()
diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py
index 91e816b4..63302b7a 100644
--- a/test_opensearchpy/test_server/test_helpers/test_data.py
+++ b/test_opensearchpy/test_server/test_helpers/test_data.py
@@ -27,6 +27,8 @@

 from __future__ import unicode_literals

+from typing import Any, Dict
+

 def create_flat_git_index(client, index):
     # we will use user on several places
@@ -1093,7 +1095,7 @@ def create_git_index(client, index):
 ]


-def flatten_doc(d):
+def flatten_doc(d) -> Dict[str, Any]:
     src = d["_source"].copy()
     del src["commit_repo"]
     return {"_index": "flat-git", "_id": d["_id"], "_source": src}
@@ -1102,7 +1104,7 @@
 FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d]


-def create_test_git_data(d):
+def create_test_git_data(d) -> Dict[str, Any]:
     src = d["_source"].copy()
     return {
         "_index": "test-git",
diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py
index f459afb2..0da4b856 100644
--- a/test_opensearchpy/test_server/test_helpers/test_document.py
+++ b/test_opensearchpy/test_server/test_helpers/test_document.py
@@ -161,7 +161,7 @@ def test_serialization(write_client):
     }


-def test_nested_inner_hits_are_wrapped_properly(pull_request):
+def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None:
     history_query = Q(
         "nested",
         path="comments.history",
@@ -189,7 +189,7 @@ def test_nested_inner_hits_are_wrapped_properly(pull_request):
     assert "score" in history.meta


-def test_nested_inner_hits_are_deserialized_properly(pull_request):
+def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None:
     s = PullRequest.search().query(
         "nested",
         inner_hits={},
@@ -204,7 +204,7 @@ def test_nested_inner_hits_are_deserialized_properly(pull_request):
     assert isinstance(pr.comments[0].created_at, datetime)


-def test_nested_top_hits_are_wrapped_properly(pull_request):
+def test_nested_top_hits_are_wrapped_properly(pull_request) -> None:
     s = PullRequest.search()
     s.aggs.bucket("comments", "nested", path="comments").metric(
         "hits", "top_hits", size=1
@@ -216,7 +216,7 @@ def test_nested_top_hits_are_wrapped_properly(pull_request):
     assert isinstance(r.aggregations.comments.hits.hits[0], Comment)


-def test_update_object_field(write_client):
+def test_update_object_field(write_client) -> None:
     Wiki.init()
     w = Wiki(
         owner=User(name="Honza Kral"),
@@ -236,7 +236,7 @@ def test_update_object_field(write_client):
     assert w.ranked == {"test1": 0.1, "topic2": 0.2}


-def test_update_script(write_client):
+def test_update_script(write_client) -> None:
     Wiki.init()
     w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42)
     w.save()
@@ -246,7 +246,7 @@ def test_update_script(write_client):
     assert w.views == 47


-def test_update_retry_on_conflict(write_client):
+def test_update_retry_on_conflict(write_client) -> None:
     Wiki.init()
     w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42)
     w.save()
@@ -261,7 +261,7 @@ def test_update_retry_on_conflict(write_client):


 @pytest.mark.parametrize("retry_on_conflict", [None, 0])
-def test_update_conflicting_version(write_client, retry_on_conflict):
+def test_update_conflicting_version(write_client, retry_on_conflict) -> None:
     Wiki.init()
     w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42)
     w.save()
@@ -278,7 +278,7 @@ def test_update_conflicting_version(write_client, retry_on_conflict):
     )


-def test_save_and_update_return_doc_meta(write_client):
+def test_save_and_update_return_doc_meta(write_client) -> None:
     Wiki.init()
     w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42)
     resp = w.save(return_doc_meta=True)
@@ -302,31 +302,31 @@ def test_save_and_update_return_doc_meta(write_client):
     assert resp.keys().__contains__("_version")


-def test_init(write_client):
+def test_init(write_client) -> None:
     Repository.init(index="test-git")

     assert write_client.indices.exists(index="test-git")


-def test_get_raises_404_on_index_missing(data_client):
+def test_get_raises_404_on_index_missing(data_client) -> None:
     with raises(NotFoundError):
         Repository.get("opensearch-dsl-php", index="not-there")


-def test_get_raises_404_on_non_existent_id(data_client):
+def test_get_raises_404_on_non_existent_id(data_client) -> None:
     with raises(NotFoundError):
         Repository.get("opensearch-dsl-php")


-def test_get_returns_none_if_404_ignored(data_client):
+def test_get_returns_none_if_404_ignored(data_client) -> None:
     assert None is Repository.get("opensearch-dsl-php", ignore=404)


-def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client):
+def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client) -> None:
     assert None is Repository.get("42", index="not-there", ignore=404)


-def test_get(data_client):
+def test_get(data_client) -> None:
     opensearch_repo = Repository.get("opensearch-py")

     assert isinstance(opensearch_repo, Repository)
@@ -334,15 +334,15 @@ def test_get(data_client):
     assert datetime(2014, 3, 3) == opensearch_repo.created_at


-def test_exists_return_true(data_client):
+def test_exists_return_true(data_client) -> None:
     assert Repository.exists("opensearch-py")


-def test_exists_false(data_client):
+def test_exists_false(data_client) -> None:
     assert not Repository.exists("opensearch-dsl-php")


-def test_get_with_tz_date(data_client):
+def test_get_with_tz_date(data_client) -> None:
     first_commit = Commit.get(
         id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py"
     )
@@ -354,7 +354,7 @@
     )


-def test_save_with_tz_date(data_client):
+def test_save_with_tz_date(data_client) -> None:
     tzinfo = timezone("Europe/Prague")
     first_commit = Commit.get(
         id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py"
     )
@@ -381,7 +381,7 @@ def test_save_with_tz_date(data_client):
 ]


-def test_mget(data_client):
+def test_mget(data_client) -> None:
     commits = Commit.mget(COMMIT_DOCS_WITH_MISSING)
     assert commits[0] is None
     assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037"
@@ -389,23 +389,23 @@ def test_mget(data_client):
     assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755"


-def test_mget_raises_exception_when_missing_param_is_invalid(data_client):
+def test_mget_raises_exception_when_missing_param_is_invalid(data_client) -> None:
     with raises(ValueError):
         Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj")


-def test_mget_raises_404_when_missing_param_is_raise(data_client):
+def test_mget_raises_404_when_missing_param_is_raise(data_client) -> None:
     with raises(NotFoundError):
         Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise")


-def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client):
+def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client) -> None:
     commits = Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip")
     assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037"
     assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755"


-def test_update_works_from_search_response(data_client):
+def test_update_works_from_search_response(data_client) -> None:
     opensearch_repo = Repository.search().execute()[0]

     opensearch_repo.update(owner={"other_name": "opensearchpy"})
@@ -416,7 +416,7 @@ def test_update_works_from_search_response(data_client):
     assert "opensearch" == new_version.owner.name


-def test_update(data_client):
+def test_update(data_client) -> None:
     opensearch_repo = Repository.get("opensearch-py")
     v = opensearch_repo.meta.version

@@ -440,7 +440,7 @@ def test_update(data_client):
     assert "primary_term" in new_version.meta


-def test_save_updates_existing_doc(data_client):
+def test_save_updates_existing_doc(data_client) -> None:
     opensearch_repo = Repository.get("opensearch-py")

     opensearch_repo.new_field = "testing-save"
@@ -453,7 +453,7 @@ def test_save_updates_existing_doc(data_client):
     assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no


-def test_save_automatically_uses_seq_no_and_primary_term(data_client):
+def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> None:
     opensearch_repo = Repository.get("opensearch-py")
     opensearch_repo.meta.seq_no += 1

@@ -461,7 +461,7 @@
         opensearch_repo.save()


-def test_delete_automatically_uses_seq_no_and_primary_term(data_client):
+def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> None:
     opensearch_repo = Repository.get("opensearch-py")
     opensearch_repo.meta.seq_no += 1

@@ -469,7 +469,7 @@
         opensearch_repo.delete()


-def assert_doc_equals(expected, actual):
+def assert_doc_equals(expected, actual) -> None:
     for f in expected:
         assert f in actual
         assert actual[f] == expected[f]
@@ -490,7 +490,7 @@ def test_can_save_to_different_index(write_client):
     )


-def test_save_without_skip_empty_will_include_empty_fields(write_client):
+def test_save_without_skip_empty_will_include_empty_fields(write_client) -> None:
     test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42})

     assert test_repo.save(index="test-document", skip_empty=False)
@@ -505,7 +505,7 @@ def test_save_without_skip_empty_will_include_empty_fields(write_client):
     )


-def test_delete(write_client):
+def test_delete(write_client) -> None:
     write_client.create(
         index="test-document",
         id="opensearch-py",
@@ -526,11 +526,11 @@ def test_delete(write_client):
     )


-def test_search(data_client):
+def test_search(data_client) -> None:
     assert Repository.search().count() == 1


-def test_search_returns_proper_doc_classes(data_client):
+def test_search_returns_proper_doc_classes(data_client) -> None:
     result = Repository.search().execute()

     opensearch_repo = result.hits[0]
@@ -539,7 +539,7 @@ def test_search_returns_proper_doc_classes(data_client):
     assert opensearch_repo.owner.name == "opensearch"


-def test_refresh_mapping(data_client):
+def test_refresh_mapping(data_client) -> None:
     class Commit(Document):
         class Index:
             name = "git"
@@ -553,7 +553,7 @@ class Index:
     assert isinstance(Commit._index._mapping["committed_date"], Date)


-def test_highlight_in_meta(data_client):
+def test_highlight_in_meta(data_client) -> None:
     commit = (
         Commit.search()
         .query("match", description="inverting")
diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py
index f7469d18..4656d4b2 100644
--- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py
+++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py
@@ -131,7 +131,7 @@ class PRSearch(FacetedSearch):
     return PRSearch


-def test_facet_with_custom_metric(data_client):
+def test_facet_with_custom_metric(data_client) -> None:
     ms = MetricSearch()
     r = ms.execute()

@@ -140,7 +140,7 @@ def test_facet_with_custom_metric(data_client):
     assert dates[0] == 1399038439000


-def test_nested_facet(pull_request, pr_search_cls):
+def test_nested_facet(pull_request, pr_search_cls) -> None:
     prs = pr_search_cls()
     r = prs.execute()

@@ -148,7 +148,7 @@ def test_nested_facet(pull_request, pr_search_cls):
     assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments


-def test_nested_facet_with_filter(pull_request, pr_search_cls):
+def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None:
     prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)})
     r = prs.execute()

@@ -160,7 +160,7 @@ def test_nested_facet_with_filter(pull_request, pr_search_cls):
     assert not r.hits


-def test_datehistogram_facet(data_client, repo_search_cls):
+def test_datehistogram_facet(data_client, repo_search_cls) -> None:
     rs = repo_search_cls()
     r = rs.execute()

@@ -168,7 +168,7 @@ def test_datehistogram_facet(data_client, repo_search_cls):
     assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created


-def test_boolean_facet(data_client, repo_search_cls):
+def test_boolean_facet(data_client, repo_search_cls) -> None:
     rs = repo_search_cls()
     r = rs.execute()

@@ -180,7 +180,7 @@ def test_boolean_facet(data_client, repo_search_cls):

 def test_empty_search_finds_everything(
     data_client, opensearch_version, commit_search_cls
-):
+) -> None:
     cs = commit_search_cls()
     r = cs.execute()

     assert r.hits.total.value == 52
@@ -226,7 +226,7 @@ def test_empty_search_finds_everything(

 def test_term_filters_are_shown_as_selected_and_data_is_filtered(
     data_client, commit_search_cls
-):
+) -> None:
     cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"})

     r = cs.execute()
@@ -272,7 +272,7 @@

 def test_range_filters_are_shown_as_selected_and_data_is_filtered(
     data_client, commit_search_cls
-):
+) -> None:
     cs = commit_search_cls(filters={"deletions": "better"})

     r = cs.execute()
@@ -280,7 +280,7 @@ def test_range_filters_are_shown_as_selected_and_data_is_filtered(
     assert 19 == r.hits.total.value


-def test_pagination(data_client, commit_search_cls):
+def test_pagination(data_client, commit_search_cls) -> None:
     cs = commit_search_cls()
     cs = cs[0:20]
diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py
index 84525b01..8593459c 100644
--- a/test_opensearchpy/test_server/test_helpers/test_index.py
+++ b/test_opensearchpy/test_server/test_helpers/test_index.py
@@ -34,7 +34,7 @@ class Post(Document):
     published_from = Date()


-def test_index_template_works(write_client):
+def test_index_template_works(write_client) -> None:
     it = IndexTemplate("test-template", "test-*")
     it.document(Post)
     it.settings(number_of_replicas=0, number_of_shards=1)
@@ -55,7 +55,7 @@ def test_index_template_works(write_client):
     } == write_client.indices.get_mapping(index="test-blog")


-def test_index_can_be_saved_even_with_settings(write_client):
+def test_index_can_be_saved_even_with_settings(write_client) -> None:
     i = Index("test-blog", using=write_client)
     i.settings(number_of_shards=3, number_of_replicas=0)
     i.save()
@@ -67,12 +67,12 @@ def test_index_can_be_saved_even_with_settings(write_client):
     )


-def test_index_exists(data_client):
+def test_index_exists(data_client) -> None:
     assert Index("git").exists()
     assert not Index("not-there").exists()


-def test_index_can_be_created_with_settings_and_mappings(write_client):
+def test_index_can_be_created_with_settings_and_mappings(write_client) -> None:
     i = Index("test-blog", using=write_client)
     i.document(Post)
     i.settings(number_of_replicas=0, number_of_shards=1)
@@ -97,7 +97,7 @@ def test_index_can_be_created_with_settings_and_mappings(write_client):
     }


-def test_delete(write_client):
+def test_delete(write_client) -> None:
     write_client.indices.create(
         index="test-index",
         body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}},
@@ -108,7 +108,7 @@ def test_delete(write_client):
     assert not write_client.indices.exists(index="test-index")


-def test_multiple_indices_with_same_doc_type_work(write_client):
+def test_multiple_indices_with_same_doc_type_work(write_client) -> None:
     i1 = Index("test-index-1", using=write_client)
     i2 = Index("test-index-2", using=write_client)
diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py
index a9278159..50a80dea 100644
--- a/test_opensearchpy/test_server/test_helpers/test_mapping.py
+++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py
@@ -31,7 +31,7 @@
 from opensearchpy.helpers import analysis, mapping


-def test_mapping_saved_into_opensearch(write_client):
+def test_mapping_saved_into_opensearch(write_client) -> None:
     m = mapping.Mapping()
     m.field(
         "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword")
@@ -51,7 +51,9 @@ def test_mapping_saved_into_opensearch(write_client):
     } == write_client.indices.get_mapping(index="test-mapping")


-def test_mapping_saved_into_opensearch_when_index_already_exists_closed(write_client):
+def test_mapping_saved_into_opensearch_when_index_already_exists_closed(
+    write_client,
+) -> None:
     m = mapping.Mapping()
     m.field(
         "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword")
@@ -76,7 +78,7 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_closed(write_cl

 def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis(
     write_client,
-):
+) -> None:
     m = mapping.Mapping()
     analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword")
     m.field("name", "text", analyzer=analyzer)
diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py
index 90aabbc0..5e45645a 100644
--- a/test_opensearchpy/test_server/test_helpers/test_search.py
+++ b/test_opensearchpy/test_server/test_helpers/test_search.py
@@ -62,7 +62,7 @@ class Index:
         name = "flat-git"


-def test_filters_aggregation_buckets_are_accessible(data_client):
+def test_filters_aggregation_buckets_are_accessible(data_client) -> None:
     has_tests_query = Q("term", files="test_opensearchpy/test_dsl")
     s = Commit.search()[0:0]
     s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket(
@@ -83,7 +83,7 @@ def test_filters_aggregation_buckets_are_accessible(data_client):
     )


-def test_top_hits_are_wrapped_in_response(data_client):
+def test_top_hits_are_wrapped_in_response(data_client) -> None:
     s = Commit.search()[0:0]
     s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric(
         "top_commits", "top_hits", size=5
@@ -99,7 +99,7 @@ def test_top_hits_are_wrapped_in_response(data_client):
     assert isinstance(hits[0], Commit)


-def test_inner_hits_are_wrapped_in_response(data_client):
+def test_inner_hits_are_wrapped_in_response(data_client) -> None:
     s = Search(index="git")[0:1].query(
         "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all")
     )
@@ -110,7 +110,7 @@ def test_inner_hits_are_wrapped_in_response(data_client):
     assert repr(commit.meta.inner_hits.repo[0]).startswith("<Hit(git/opensearch-py): ")


-def test_scan_respects_doc_types(data_client):
+def test_scan_respects_doc_types(data_client) -> None:
     repos = list(Repository.search().scan())

     assert 1 == len(repos)
@@ -118,7 +118,7 @@ def test_scan_respects_doc_types(data_client):
     assert repos[0].organization == "opensearch"


-def test_scan_iterates_through_all_docs(data_client):
+def test_scan_iterates_through_all_docs(data_client) -> None:
     s = Search(index="flat-git")

     commits = list(s.scan())
@@ -127,7 +127,7 @@ def test_scan_iterates_through_all_docs(data_client):
     assert {d["_id"] for d in FLAT_DATA} == {c.meta.id for c in commits}


-def test_response_is_cached(data_client):
+def test_response_is_cached(data_client) -> None:
     s = Repository.search()
     repos = list(s)

@@ -135,7 +135,7 @@ def test_response_is_cached(data_client):
     assert s._response.hits == repos


-def test_multi_search(data_client):
+def test_multi_search(data_client) -> None:
     s1 = Repository.search()
     s2 = Search(index="flat-git")

@@ -152,7 +152,7 @@ def test_multi_search(data_client):
     assert r2._search is s2


-def test_multi_missing(data_client):
+def test_multi_missing(data_client) -> None:
     s1 = Repository.search()
     s2 = Search(index="flat-git")
     s3 = Search(index="does_not_exist")
@@ -175,7 +175,7 @@ def test_multi_missing(data_client):
     assert r3 is None


-def test_raw_subfield_can_be_used_in_aggs(data_client):
+def test_raw_subfield_can_be_used_in_aggs(data_client) -> None:
     s = Search(index="git")[0:0]
     s.aggs.bucket("authors", "terms", field="author.name.raw", size=1)
diff --git a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py
index 81a75802..fb46e956 100644
--- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py
+++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py
@@ -29,7 +29,7 @@
 from opensearchpy.helpers.update_by_query import UpdateByQuery


-def test_update_by_query_no_script(write_client, setup_ubq_tests):
+def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None:
     index = setup_ubq_tests

     ubq = (
@@ -48,7 +48,7 @@ def test_update_by_query_no_script(write_client, setup_ubq_tests):
     assert response.success()


-def test_update_by_query_with_script(write_client, setup_ubq_tests):
+def test_update_by_query_with_script(write_client, setup_ubq_tests) -> None:
     index = setup_ubq_tests

     ubq = (
@@ -65,7 +65,7 @@ def test_update_by_query_with_script(write_client, setup_ubq_tests):
     assert response.version_conflicts == 0


-def test_delete_by_query_with_script(write_client, setup_ubq_tests):
+def test_delete_by_query_with_script(write_client, setup_ubq_tests) -> None:
     index = setup_ubq_tests

     ubq = (
diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py
index 3a503e43..d127edb1 100644
--- a/test_opensearchpy/test_server/test_plugins/test_alerting.py
+++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py
@@ -39,7 +39,7 @@ def test_create_destination(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    def test_get_destination(self):
+    def test_get_destination(self) -> None:
         # Create a dummy destination
         self.test_create_destination()

@@ -119,7 +119,7 @@ def test_create_monitor(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    def test_search_monitor(self):
+    def test_search_monitor(self) -> None:
         # Create a dummy monitor
         self.test_create_monitor()

@@ -137,7 +137,7 @@ def test_search_monitor(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    def test_get_monitor(self):
+    def test_get_monitor(self) -> None:
         # Create a dummy monitor
         self.test_create_monitor()

@@ -161,7 +161,7 @@ def test_get_monitor(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    def test_run_monitor(self):
+    def test_run_monitor(self) -> None:
         # Create a dummy monitor
         self.test_create_monitor()
diff --git a/test_opensearchpy/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_server/test_plugins/test_index_management.py
index 68f61c7b..ed8c0b57 100644
--- a/test_opensearchpy/test_server/test_plugins/test_index_management.py
+++ b/test_opensearchpy/test_server/test_plugins/test_index_management.py
@@ -64,7 +64,7 @@ class TestIndexManagementPlugin(OpenSearchTestCase):
         }
     }

-    def test_create_policy(self):
+    def test_create_policy(self) -> None:
         # Test to create policy
         response = self.client.index_management.put_policy(
             policy=self.POLICY_NAME, body=self.POLICY_CONTENT
@@ -73,7 +73,7 @@ def test_create_policy(self):
         self.assertNotIn("errors", response)
         self.assertIn("_id", response)

-    def test_get_policy(self):
+    def test_get_policy(self) -> None:
         # Create a policy
         self.test_create_policy()

@@ -84,7 +84,7 @@ def test_get_policy(self):
         self.assertIn("_id", response)
         self.assertEqual(response["_id"], self.POLICY_NAME)

-    def test_update_policy(self):
+    def test_update_policy(self) -> None:
         # Create a policy
         self.test_create_policy()

@@ -106,7 +106,7 @@ def test_update_policy(self):
         self.assertNotIn("errors", response)
         self.assertIn("_id", response)

-    def test_delete_policy(self):
+    def test_delete_policy(self) -> None:
         # Create a policy
         self.test_create_policy()
diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py
index e4c5cb3f..ba16d044 100644
--- a/test_opensearchpy/test_server/test_rest_api_spec.py
+++ b/test_opensearchpy/test_server/test_rest_api_spec.py
@@ -142,7 +142,7 @@


 class YamlRunner:
-    def __init__(self, client):
+    def __init__(self, client) -> None:
         self.client = client
         self.last_response = None

@@ -151,7 +151,7 @@ def __init__(self, client):
         self._teardown_code = None
         self._state = {}

-    def use_spec(self, test_spec):
+    def use_spec(self, test_spec) -> None:
         self._setup_code = test_spec.pop("setup", None)
         self._run_code = test_spec.pop("run", None)
         self._teardown_code = test_spec.pop("teardown", None)
@@ -174,7 +174,7 @@ def setup(self):
         if self._setup_code:
             self.run_code(self._setup_code)

-    def teardown(self):
+    def teardown(self) -> None:
         if self._teardown_code:
             self.section("teardown")
             self.run_code(self._teardown_code)
@@ -189,10 +189,10 @@ def opensearch_version(self):
             OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 99 for v in version)
         return OPENSEARCH_VERSION

-    def section(self, name):
+    def section(self, name) -> None:
         print(("=" * 10) + " " + name + " " + ("=" * 10))

-    def run(self):
+    def run(self) -> None:
         try:
             self.setup()
             self.section("test")
@@ -203,7 +203,7 @@ def run(self):
             except Exception:
                 pass

-    def run_code(self, test):
+    def run_code(self, test) -> None:
         """Execute an instruction based on its type."""
         for action in test:
             assert len(action) == 1
@@ -215,7 +215,7 @@ def run_code(self, test):
             else:
                 raise RuntimeError("Invalid action type %r" % (action_type,))

-    def run_do(self, action):
+    def run_do(self, action) -> None:
         api = self.client
         headers = action.pop("headers", None)
         catch = action.pop("catch", None)
@@ -281,7 +281,7 @@ def run_do(self, action):
                     % (warn, caught_warnings)
                 )

-    def run_catch(self, catch, exception):
+    def run_catch(self, catch, exception) -> None:
         if catch == "param":
             assert isinstance(exception, TypeError)
             return
@@ -296,7 +296,7 @@ def run_catch(self, catch, exception):
             ) is not None
         self.last_response = exception.info

-    def run_skip(self, skip):
+    def run_skip(self, skip) -> None:
         global IMPLEMENTED_FEATURES

         if "features" in skip:
@@ -318,32 +318,32 @@ def run_skip(self, skip):
             if min_version <= (self.opensearch_version()) <= max_version:
                 pytest.skip(reason)

-    def run_gt(self, action):
+    def run_gt(self, action) -> None:
         for key, value in action.items():
             value = self._resolve(value)
             assert self._lookup(key) > value

-    def run_gte(self, action):
+    def run_gte(self, action) -> None:
         for key, value in action.items():
             value = self._resolve(value)
             assert self._lookup(key) >= value

-    def run_lt(self, action):
+    def run_lt(self, action) -> None:
         for key, value in action.items():
             value = self._resolve(value)
             assert self._lookup(key) < value

-    def run_lte(self, action):
+    def run_lte(self, action) -> None:
         for key, value in action.items():
             value = self._resolve(value)
             assert self._lookup(key) <= value

-    def run_set(self, action):
+    def run_set(self, action) -> None:
         for key, value in action.items():
             value = self._resolve(value)
             self._state[value] = self._lookup(key)

-    def run_is_false(self, action):
+    def run_is_false(self, action) -> None:
         try:
             value = self._lookup(action)
         except AssertionError:
@@ -351,17 +351,17 @@ def run_is_false(self, action):
         else:
             assert value in FALSEY_VALUES

-    def run_is_true(self, action):
+    def run_is_true(self, action) -> None:
         value = self._lookup(action)
         assert value not in FALSEY_VALUES

-    def run_length(self, action):
+    def run_length(self, action) -> None:
         for path, expected in action.items():
             value = self._lookup(path)
             expected = self._resolve(expected)
             assert expected == len(value)

-    def run_match(self, action):
+    def run_match(self, action) -> None:
         for path, expected in action.items():
             value = self._lookup(path)
             expected = self._resolve(expected)
@@ -379,7 +379,7 @@ def run_match(self, action):
         else:
             self._assert_match_equals(value, expected)

-    def run_contains(self, action):
+    def run_contains(self, action) -> None:
         for path, expected in action.items():
             value = self._lookup(path)  # list[dict[str,str]] is returned
             expected = self._resolve(expected)  # dict[str, str]
@@ -387,7 +387,7 @@ def run_contains(self, action):
             if expected not in value:
                 raise AssertionError("%s is not contained by %s" % (expected, value))

-    def run_transform_and_set(self, action):
+    def run_transform_and_set(self, action) -> None:
         for key, value in action.items():
             # Convert #base64EncodeCredentials(id,api_key) to ["id", "api_key"]
             if "#base64EncodeCredentials" in value:
@@ -449,10 +449,10 @@ def _lookup(self, path):
             value = value[step]
         return value

-    def _feature_enabled(self, name):
+    def _feature_enabled(self, name) -> bool:
         return False

-    def _assert_match_equals(self, a, b):
+    def _assert_match_equals(self, a, b) -> None:
         # Handle for large floating points with 'E'
         if isinstance(b, string_types) and isinstance(a, float) and "e" in repr(a):
             a = repr(a).replace("e+", "E")
@@ -533,7 +533,7 @@ def sync_runner(sync_client):
 if not RUN_ASYNC_REST_API_TESTS:

     @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS)
-    def test_rest_api_spec(test_spec, sync_runner):
+    def test_rest_api_spec(test_spec, sync_runner) -> None:
         if test_spec.get("skip", False):
             pytest.skip("Manually skipped in 'SKIP_TESTS'")
         sync_runner.use_spec(test_spec)
diff --git a/test_opensearchpy/test_server_secured/test_clients.py b/test_opensearchpy/test_server_secured/test_clients.py
index e597c6ac..94684ffb 100644
--- a/test_opensearchpy/test_server_secured/test_clients.py
+++ b/test_opensearchpy/test_server_secured/test_clients.py
@@ -15,7 +15,7 @@


 class TestSecurity(TestCase):
-    def test_security(self):
+    def test_security(self) -> None:
         client = OpenSearch(
             OPENSEARCH_URL,
             http_auth=("admin", "admin"),
diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py
index 90283af8..5c719953 100644
--- a/test_opensearchpy/test_server_secured/test_security_plugin.py
+++ b/test_opensearchpy/test_server_secured/test_security_plugin.py
@@ -36,15 +36,15 @@ class TestSecurityPlugin(TestCase):
     USER_NAME = "test-user"
     USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []}

-    def setUp(self):
+    def setUp(self) -> None:
         self.client = get_test_client(verify_certs=False, http_auth=("admin", "admin"))
         add_connection("default", self.client)

-    def tearDown(self):
+    def tearDown(self) -> None:
         if self.client:
             self.client.close()

-    def test_create_role(self):
+    def test_create_role(self) -> None:
         # Test to create role
         response = self.client.security.create_role(
             self.ROLE_NAME, body=self.ROLE_CONTENT
@@ -53,7 +53,7 @@ def test_create_role(self):
         self.assertNotIn("errors", response)
         self.assertIn(response.get("status"), ["CREATED", "OK"])

-    def test_create_role_with_body_param_empty(self):
+    def test_create_role_with_body_param_empty(self) -> None:
         try:
             self.client.security.create_role(self.ROLE_NAME, body="")
         except ValueError as error:
@@ -61,7 +61,7 @@ def test_create_role_with_body_param_empty(self):
         else:
             assert False

-    def test_get_role(self):
+    def test_get_role(self) -> None:
         # Create a role
         self.test_create_role()

@@ -71,7 +71,7 @@ def test_get_role(self):
         self.assertNotIn("errors", response)
         self.assertIn(self.ROLE_NAME, response)

-    def test_update_role(self):
+    def test_update_role(self) -> None:
         # Create a role
         self.test_create_role()

@@ -84,7 +84,7 @@ def test_update_role(self):
         self.assertNotIn("errors", response)
         self.assertEqual("OK", response.get("status"))

-    def test_delete_role(self):
+    def test_delete_role(self) -> None:
         # Create a role
         self.test_create_role()

@@ -97,7 +97,7 @@ def test_delete_role(self):
         with self.assertRaises(NotFoundError):
             response = self.client.security.get_role(self.ROLE_NAME)

-    def test_create_user(self):
+    def test_create_user(self) -> None:
         # Test to create user
         response = self.client.security.create_user(
             self.USER_NAME, body=self.USER_CONTENT
@@ -106,7 +106,7 @@ def test_create_user(self):
         self.assertNotIn("errors", response)
         self.assertIn(response.get("status"), ["CREATED", "OK"])

-    def test_create_user_with_body_param_empty(self):
+    def test_create_user_with_body_param_empty(self) -> None:
         try:
             self.client.security.create_user(self.USER_NAME, body="")
         except ValueError as error:
@@ -129,7 +129,7 @@ def test_create_user_with_role(self):
         self.assertNotIn("errors", response)
         self.assertIn(response.get("status"), ["CREATED", "OK"])

-    def test_get_user(self):
+    def test_get_user(self) -> None:
         # Create a user
         self.test_create_user()

@@ -139,7 +139,7 @@ def test_get_user(self):
         self.assertNotIn("errors", response)
         self.assertIn(self.USER_NAME, response)

-    def test_update_user(self):
+    def test_update_user(self) -> None:
         # Create a user
         self.test_create_user()

@@ -152,7 +152,7 @@ def test_update_user(self):
         self.assertNotIn("errors", response)
         self.assertEqual("OK", response.get("status"))

-    def test_delete_user(self):
+    def test_delete_user(self) -> None:
         # Create a user
         self.test_create_user()

@@ -165,12 +165,12 @@ def test_delete_user(self):
         with self.assertRaises(NotFoundError):
             response = self.client.security.get_user(self.USER_NAME)

-    def test_health_check(self):
+    def test_health_check(self) -> None:
         response = self.client.security.health_check()
         self.assertNotIn("errors", response)
         self.assertEqual("UP", response.get("status"))

-    def test_health(self):
+    def test_health(self) -> None:
         response = self.client.security.health()
         self.assertNotIn("errors", response)
         self.assertEqual("UP", response.get("status"))
@@ -203,14 +203,14 @@ def test_health(self):
         },
     }

-    def test_update_audit_config(self):
+    def test_update_audit_config(self) -> None:
         response = self.client.security.update_audit_config(
             body=self.AUDIT_CONFIG_SETTINGS
         )
         self.assertNotIn("errors", response)
         self.assertEqual("OK", response.get("status"))

-    def test_update_audit_configuration(self):
+    def test_update_audit_configuration(self) -> None:
         response = self.client.security.update_audit_configuration(
             body=self.AUDIT_CONFIG_SETTINGS
         )
diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py
index 2c0892cf..a69a7cf0 100644
--- a/test_opensearchpy/test_transport.py
+++ b/test_opensearchpy/test_transport.py
@@ -42,7 +42,7 @@


 class DummyConnection(Connection):
-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs) -> None:
         self.exception = kwargs.pop("exception", None)
         self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}")
         self.headers = kwargs.pop("headers", {})
@@ -108,7 +108,7 @@ def perform_request(self, *args, **kwargs):


 class TestHostsInfoCallback(TestCase):
-    def test_cluster_manager_only_nodes_are_ignored(self):
+    def test_cluster_manager_only_nodes_are_ignored(self) -> None:
         nodes = [
             {"roles": ["cluster_manager"]},
             {"roles": ["cluster_manager", "data", "ingest"]},
@@ -125,13 +125,13 @@ def test_cluster_manager_only_nodes_are_ignored(self):


 class TestTransport(TestCase):
-    def test_single_connection_uses_dummy_connection_pool(self):
+    def test_single_connection_uses_dummy_connection_pool(self) -> None:
         t = Transport([{}])
         self.assertIsInstance(t.connection_pool, DummyConnectionPool)
         t = Transport([{"host": "localhost"}])
         self.assertIsInstance(t.connection_pool, DummyConnectionPool)

-    def test_request_timeout_extracted_from_params_and_passed(self):
+    def test_request_timeout_extracted_from_params_and_passed(self) -> None:
         t = Transport([{}], connection_class=DummyConnection)

         t.perform_request("GET", "/", params={"request_timeout": 42})
@@ -142,7 +142,7 @@ def test_request_timeout_extracted_from_params_and_passed(self):
             t.get_connection().calls[0][1],
         )

-    def test_timeout_extracted_from_params_and_passed(self):
+    def test_timeout_extracted_from_params_and_passed(self) -> None:
         t = Transport([{}], connection_class=DummyConnection)

         t.perform_request("GET", "/", params={"timeout": 84})
@@ -153,7 +153,7 @@ def test_timeout_extracted_from_params_and_passed(self):
             t.get_connection().calls[0][1],
         )

-    def test_opaque_id(self):
+    def test_opaque_id(self) -> None:
         t = Transport([{}], opaque_id="app-1", connection_class=DummyConnection)

         t.perform_request("GET", "/")
@@ -173,7 +173,7 @@ def test_opaque_id(self):
             t.get_connection().calls[1][1],
         )

-    def test_request_with_custom_user_agent_header(self):
+    def test_request_with_custom_user_agent_header(self) -> None:
         t = Transport([{}], connection_class=DummyConnection)

         t.perform_request("GET", "/", headers={"user-agent": "my-custom-value/1.2.3"})
@@ -187,7 +187,7 @@ def test_request_with_custom_user_agent_header(self):
             t.get_connection().calls[0][1],
         )

-    def test_send_get_body_as_source(self):
+    def test_send_get_body_as_source(self) -> None:
         t = Transport([{}], send_get_body_as="source", connection_class=DummyConnection)

         t.perform_request("GET", "/", body={})
@@ -196,14 +196,14 @@ def test_send_get_body_as_source(self):
             ("GET", "/", {"source": "{}"}, None), t.get_connection().calls[0][0]
         )

-    def test_send_get_body_as_post(self):
+    def test_send_get_body_as_post(self) -> None:
         t = Transport([{}], send_get_body_as="POST", connection_class=DummyConnection)

         t.perform_request("GET", "/", body={})
         self.assertEqual(1, len(t.get_connection().calls))
         self.assertEqual(("POST", "/", None, b"{}"), t.get_connection().calls[0][0])

-    def test_body_gets_encoded_into_bytes(self):
+    def test_body_gets_encoded_into_bytes(self) -> None:
         t = Transport([{}], connection_class=DummyConnection)

         t.perform_request("GET", "/", body="你好")
@@ -213,7 +213,7 @@ def test_body_gets_encoded_into_bytes(self):
             t.get_connection().calls[0][0],
         )

-    def test_body_bytes_get_passed_untouched(self):
+    def test_body_bytes_get_passed_untouched(self) -> None:
         t = Transport([{}], connection_class=DummyConnection)

         body = b"\xe4\xbd\xa0\xe5\xa5\xbd"
@@ -221,7 +221,7 @@ def test_body_bytes_get_passed_untouched(self):
         self.assertEqual(1, len(t.get_connection().calls))
         self.assertEqual(("GET", "/", None, body), t.get_connection().calls[0][0])

-    def test_body_surrogates_replaced_encoded_into_bytes(self):
+    def test_body_surrogates_replaced_encoded_into_bytes(self) -> None:
         t = Transport([{}], connection_class=DummyConnection)

         t.perform_request("GET", "/", body="你好\uda6a")
@@ -231,17 +231,17 @@ def test_body_surrogates_replaced_encoded_into_bytes(self):
             t.get_connection().calls[0][0],
         )

-    def test_kwargs_passed_on_to_connections(self):
+    def test_kwargs_passed_on_to_connections(self) -> None:
         t = Transport([{"host": "google.com"}], port=123)
         self.assertEqual(1, len(t.connection_pool.connections))
         self.assertEqual("http://google.com:123", t.connection_pool.connections[0].host)

-    def test_kwargs_passed_on_to_connection_pool(self):
+    def test_kwargs_passed_on_to_connection_pool(self) -> None:
         dt = object()
         t = Transport([{}, {}], dead_timeout=dt)
         self.assertIs(dt, t.connection_pool.dead_timeout)

-    def test_custom_connection_class(self):
+    def test_custom_connection_class(self) -> None:
         class MyConnection(object):
             def __init__(self, **kwargs):
                 self.kwargs = kwargs
@@ -250,7 +250,7 @@ def __init__(self, **kwargs):
         self.assertEqual(1, len(t.connection_pool.connections))
         self.assertIsInstance(t.connection_pool.connections[0], MyConnection)

-    def test_add_connection(self):
+    def test_add_connection(self) -> None:
         t = Transport([{}], randomize_hosts=False)
         t.add_connection({"host": "google.com", "port": 1234})

@@ -259,7 +259,7 @@ def test_add_connection(self):
             "http://google.com:1234", t.connection_pool.connections[1].host
         )

-    def test_request_will_fail_after_X_retries(self):
+    def test_request_will_fail_after_X_retries(self) -> None:
         t = Transport(
             [{"exception": ConnectionError("abandon ship")}],
             connection_class=DummyConnection,
@@ -268,7 +268,7 @@ def test_request_will_fail_after_X_retries(self):
         self.assertRaises(ConnectionError, t.perform_request, "GET", "/")
         self.assertEqual(4, len(t.get_connection().calls))

-    def test_failed_connection_will_be_marked_as_dead(self):
+    def test_failed_connection_will_be_marked_as_dead(self) -> None:
         t = Transport(
             [{"exception": ConnectionError("abandon ship")}] * 2,
             connection_class=DummyConnection,
@@ -277,7 +277,7 @@ def test_failed_connection_will_be_marked_as_dead(self):
         self.assertRaises(ConnectionError, t.perform_request, "GET", "/")
         self.assertEqual(0, len(t.connection_pool.connections))

-    def test_resurrected_connection_will_be_marked_as_live_on_success(self):
+    def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None:
         for method in ("GET", "HEAD"):
             t = Transport([{}, {}], connection_class=DummyConnection)
             con1 = t.connection_pool.get_connection()
@@ -289,7 +289,7 @@ def test_resurrected_connection_will_be_marked_as_live_on_success(self):
             self.assertEqual(1, len(t.connection_pool.connections))
             self.assertEqual(1, len(t.connection_pool.dead_count))

-    def test_sniff_will_use_seed_connections(self):
+    def test_sniff_will_use_seed_connections(self) -> None:
         t = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection)
         t.set_connections([{"data": "invalid"}])

@@ -297,7 +297,7 @@ def test_sniff_will_use_seed_connections(self):
         self.assertEqual(1, len(t.connection_pool.connections))
         self.assertEqual("http://1.1.1.1:123", t.get_connection().host)

-    def test_sniff_on_start_fetches_and_uses_nodes_list(self):
+    def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None:
         t = Transport(
             [{"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
@@ -306,7 +306,7 @@ def test_sniff_on_start_fetches_and_uses_nodes_list(self):
         self.assertEqual(1, len(t.connection_pool.connections))
         self.assertEqual("http://1.1.1.1:123", t.get_connection().host)

-    def test_sniff_on_start_ignores_sniff_timeout(self):
+    def test_sniff_on_start_ignores_sniff_timeout(self) -> None:
         t = Transport(
             [{"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
@@ -318,7 +318,7 @@ def test_sniff_on_start_ignores_sniff_timeout(self):
             t.seed_connections[0].calls[0],
         )

-    def test_sniff_uses_sniff_timeout(self):
+    def test_sniff_uses_sniff_timeout(self) -> None:
         t = Transport(
             [{"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
@@ -373,7 +373,7 @@ def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts):
         self.assertEqual(1, len(conn_err.calls))
         self.assertEqual(1, len(conn_data.calls))

-    def test_sniff_after_n_seconds(self):
+    def test_sniff_after_n_seconds(self) -> None:
         t = Transport(
             [{"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
@@ -391,7 +391,7 @@ def test_sniff_after_n_seconds(self):
         self.assertEqual("http://1.1.1.1:123", t.get_connection().host)
         self.assertTrue(time.time() - 1 < t.last_sniff < time.time() + 0.01)

-    def test_sniff_7x_publish_host(self):
+    def test_sniff_7x_publish_host(self) -> None:
         # Test the response shaped when a 7.x node has publish_host set
         # and the returend data is shaped in the fqdn/ip:port format.
         t = Transport(
diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py
index 41497808..5aa4983b 100644
--- a/test_opensearchpy/utils.py
+++ b/test_opensearchpy/utils.py
@@ -31,7 +31,7 @@
 from opensearchpy import OpenSearch


-def wipe_cluster(client):
+def wipe_cluster(client) -> None:
     """Wipes a cluster clean between test cases"""
     close_after_wipe = False
     try:
@@ -59,7 +59,7 @@ def wipe_cluster(client):
         client.close()


-def wipe_cluster_settings(client):
+def wipe_cluster_settings(client) -> None:
     settings = client.cluster.get_settings()
     new_settings = {}
     for name, value in settings.items():
@@ -96,14 +96,14 @@ def wipe_snapshots(client):
     assert in_progress_snapshots == []


-def wipe_data_streams(client):
+def wipe_data_streams(client) -> None:
     try:
         client.indices.delete_data_stream(name="*", expand_wildcards="all")
     except Exception:
         client.indices.delete_data_stream(name="*")


-def wipe_indices(client):
+def wipe_indices(client) -> None:
     client.indices.delete(
         index="*,-.ds-ilm-history-*",
         expand_wildcards="all",
@@ -111,7 +111,7 @@ def wipe_indices(client):
     )


-def wipe_searchable_snapshot_indices(client):
+def wipe_searchable_snapshot_indices(client) -> None:
     cluster_metadata = client.cluster.state(
         metric="metadata",
         filter_path="metadata.indices.*.settings.index.store.snapshot",
@@ -121,17 +121,17 @@ def wipe_searchable_snapshot_indices(client):
             client.indices.delete(index=index)


-def wipe_slm_policies(client):
+def wipe_slm_policies(client) -> None:
     for policy in client.slm.get_lifecycle():
         client.slm.delete_lifecycle(policy_id=policy["name"])


-def wipe_auto_follow_patterns(client):
+def wipe_auto_follow_patterns(client) -> None:
     for pattern in client.ccr.get_auto_follow_pattern()["patterns"]:
         client.ccr.delete_auto_follow_pattern(name=pattern["name"])


-def wipe_node_shutdown_metadata(client):
+def wipe_node_shutdown_metadata(client) -> None:
     shutdown_status = client.shutdown.get_node()
     # If response contains these two keys the feature flag isn't enabled
     # on this cluster so skip this step now.
@@ -143,14 +143,14 @@ def wipe_node_shutdown_metadata(client):
         client.shutdown.delete_node(node_id=node_id)


-def wipe_tasks(client):
+def wipe_tasks(client) -> None:
     tasks = client.tasks.list()
     for node_name, node in tasks.get("node", {}).items():
         for task_id in node.get("tasks", ()):
             client.tasks.cancel(task_id=task_id, wait_for_completion=True)


-def wait_for_pending_tasks(client, filter, timeout=30):
+def wait_for_pending_tasks(client, filter, timeout: int = 30) -> None:
     end_time = time.time() + timeout
     while time.time() < end_time:
         tasks = client.cat.tasks(detailed=True).split("\n")
@@ -158,7 +158,7 @@ def wait_for_pending_tasks(client, filter, timeout=30):
             break


-def wait_for_pending_datafeeds_and_jobs(client, timeout=30):
+def wait_for_pending_datafeeds_and_jobs(client, timeout: int = 30) -> None:
     end_time = time.time() + timeout
     while time.time() < end_time:
         if (
@@ -171,7 +171,7 @@ def wait_for_pending_datafeeds_and_jobs(client, timeout=30):
             break


-def wait_for_cluster_state_updates_to_finish(client, timeout=30):
+def wait_for_cluster_state_updates_to_finish(client, timeout: int = 30) -> None:
     end_time = time.time() + timeout
     while time.time() < end_time:
         if not client.cluster.pending_tasks().get("tasks", ()):
diff --git a/utils/build-dists.py b/utils/build-dists.py
index c52421e7..b45da98e 100644
--- a/utils/build-dists.py
+++ b/utils/build-dists.py
@@ -52,7 +52,7 @@ def set_tmp_dir():
         tmp_dir = None


-def run(*argv, expect_exit_code=0):
+def run(*argv, expect_exit_code: int = 0) -> None:
     global tmp_dir
     if tmp_dir is None:
         os.chdir(base_dir)
@@ -70,7 +70,7 @@ def run(*argv, expect_exit_code=0):
         exit(exit_code or 1)


-def test_dist(dist):
+def test_dist(dist) -> None:
     with set_tmp_dir() as tmp_dir:
         dist_name = re.match(
             r"^(opensearchpy\d*)-",
@@ -180,7 +180,7 @@ def test_dist(dist):
         )


-def main():
+def main() -> None:
     run("git", "checkout", "--", "setup.py", "opensearchpy/")
     run("rm", "-rf", "build/", "dist/*", "*.egg-info", ".eggs")
     run("python", "setup.py", "sdist", "bdist_wheel")
@@ -188,9 +188,13 @@ def main():

     # Grab the major version to be used as a suffix.
     version_path = os.path.join(base_dir, "opensearchpy/_version.py")
     with open(version_path) as f:
-        version = re.search(
-            r"^__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read(), re.M
-        ).group(1)
+        data = f.read()
+        m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M)
+        if m:
+            version = m.group(1)
+        else:
+            raise Exception(f"Invalid version {data}")
+
     major_version = version.split(".")[0]

     # If we're handed a version from the build manager we
diff --git a/utils/generate-api.py b/utils/generate-api.py
index 7e241236..f53e212c 100644
--- a/utils/generate-api.py
+++ b/utils/generate-api.py
@@ -78,7 +78,7 @@
 )


-def blacken(filename):
+def blacken(filename) -> None:
     runner = CliRunner()
     result = runner.invoke(black.main, [str(filename)])
     assert result.exit_code == 0, result.output
@@ -90,29 +90,20 @@ def is_valid_url(url):


 class Module:
-    def __init__(self, namespace, is_pyi=False):
+    def __init__(self, namespace) -> None:
         self.namespace = namespace
-        self.is_pyi = is_pyi
         self._apis = []
         self.parse_orig()

-        if not is_pyi:
-            self.pyi = Module(namespace, is_pyi=True)
-            self.pyi.orders = self.orders[:]
-
-    def add(self, api):
+    def add(self, api) -> None:
         self._apis.append(api)

     def parse_orig(self):
         self.orders = []
-        self.header = ""
-        if self.is_pyi is True:
-            self.header = "from typing import Any, Collection, MutableMapping, Optional, Tuple, Union\n\n"
+        self.header = "from typing import Any, Collection, Optional, Tuple, Union\n\n"

         namespace_new = "".join(word.capitalize() for word in self.namespace.split("_"))
-        self.header = (
-            self.header + "class " + namespace_new + "Client(NamespacedClient):"
-        )
+        self.header += "class " + namespace_new + "Client(NamespacedClient):"
         if os.path.exists(self.filepath):
             with open(self.filepath) as f:
                 content = f.read()
@@ -127,12 +118,10 @@ def parse_orig(self):
             for line in content.split("\n"):
                 header_lines.append(line)
                 if line.startswith("class"):
-                    if (
-                        "security.py" in str(self.filepath)
-                        and not self.filepath.suffix == ".pyi"
-                    ):
+                    if "security.py" in str(self.filepath):
+                        # TODO: FIXME, import code
                         header_lines.append(
-                            "    from ._patch import health_check, update_audit_config"
+                            "    from ._patch import health_check, update_audit_config  # type: ignore"
                         )
                     break
             self.header = "\n".join(header_lines)
@@ -146,10 +135,10 @@ def _position(self, api):
         except ValueError:
             return len(self.orders)

-    def sort(self):
+    def sort(self) -> None:
         self._apis.sort(key=self._position)

-    def dump(self):
+    def dump(self) -> None:
         self.sort()

         # This code snippet adds headers to each generated module indicating that the code is generated.
@@ -244,22 +233,15 @@ def dump(self):
         with open(self.filepath, "w") as f:
             f.write(file_content)

-        if not self.is_pyi:
-            self.pyi.dump()
-
     @property
     def filepath(self):
-        return (
-            CODE_ROOT
-            / f"opensearchpy/_async/client/{self.namespace}.py{'i' if self.is_pyi else ''}"
-        )
+        return CODE_ROOT / f"opensearchpy/_async/client/{self.namespace}.py"


 class API:
-    def __init__(self, namespace, name, definition, is_pyi=False):
+    def __init__(self, namespace, name, definition) -> None:
         self.namespace = namespace
         self.name = name
-        self.is_pyi = is_pyi

         # overwrite the dict to maintain key order
         definition["params"] = {
@@ -429,13 +411,10 @@ def required_parts(self):
         return required

     def to_python(self):
-        if self.is_pyi:
-            t = jinja_env.get_template("base_pyi")
-        else:
-            try:
-                t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}")
-            except TemplateNotFound:
-                t = jinja_env.get_template("base")
+        try:
+            t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}")
+        except TemplateNotFound:
+            t = jinja_env.get_template("base")

         return t.render(
             api=self,
@@ -658,7 +637,6 @@ def read_modules():

             modules[namespace] = Module(namespace)

         modules[namespace].add(API(namespace, name, api))
-        modules[namespace].pyi.add(API(namespace, name, api, is_pyi=True))

     return modules

@@ -697,10 +675,9 @@ def dump_modules(modules):
     filepaths = []
     for root, _, filenames in os.walk(CODE_ROOT / "opensearchpy/_async"):
         for filename in filenames:
-            if filename.rpartition(".")[-1] in (
-                "py",
-                "pyi",
-            ) and not filename.startswith("utils.py"):
+            if filename.rpartition(".")[-1] in ("py",) and not filename.startswith(
+                "utils.py"
+            ):
                 filepaths.append(os.path.join(root, filename))

     unasync.unasync_files(filepaths, rules)
diff --git a/utils/license-headers.py b/utils/license-headers.py
index 67b0ef4a..e0f31b59 100644
--- a/utils/license-headers.py
+++ b/utils/license-headers.py
@@ -48,7 +48,7 @@ def find_files_to_fix(sources: List[str]) -> Iterator[str]:

 def does_file_need_fix(filepath: str) -> bool:
-    if not re.search(r"\.pyi?$", filepath):
+    if not re.search(r"\.py$", filepath):
         return False
     existing_header = ""
     with open(filepath, mode="r") as f:
@@ -78,7 +78,7 @@ def add_header_to_file(filepath: str) -> None:
     print(f"Fixed {os.path.relpath(filepath, os.getcwd())}")


-def main():
+def main() -> None:
     mode = sys.argv[1]
     assert mode in ("fix", "check")
     sources = [os.path.abspath(x) for x in sys.argv[2:]]
diff --git a/utils/templates/base b/utils/templates/base
index bf270aee..54db3451 100644
--- a/utils/templates/base
+++ b/utils/templates/base
@@ -1,6 +1,6 @@

-    @query_params({{ api.query_params|map("tojson")|join(", ")}})
-    async def {{ api.name }}(self, {% include "func_params" %}):
+    @query_params({{ api.query_params|map("tojson")|join(", ")}})
+    async def {{ api.name }}(self, {% include "func_params" %}) -> Any:
         """
         {% if api.description %}
         {{ api.description|replace("\n", " ")|wordwrap(wrapstring="\n        ") }}
diff --git a/utils/templates/base_pyi b/utils/templates/base_pyi
deleted file mode 100644
index c4dbde15..00000000
--- a/utils/templates/base_pyi
+++ /dev/null
@@ -1,2 +0,0 @@
-
-    async def {{ api.name }}(self, {% include "func_params_pyi" %}) -> {% if api.method == 'HEAD' %}bool{% else %}Any{% endif %}: ...
diff --git a/utils/templates/func_params b/utils/templates/func_params index 067e8f12..cbb976ed 100644 --- a/utils/templates/func_params +++ b/utils/templates/func_params @@ -1,14 +1,15 @@ {% for p, info in api.all_parts.items() %} - {% if info.required %}{{ p }}, {% endif %} + {% if info.required %}{{ p }}: {{ info.type }}, {% endif %} {% endfor %} {% if api.body %} - body{% if not api.body.required %}=None{% endif %}, + body{% if not api.body.required %}: Any=None{% else %}: Any{% endif %}, {% endif %} {% for p, info in api.all_parts.items() %} - {% if not info.required %}{{ p }}=None, {% endif %} + {% if not info.required and not info.type == 'Any' %}{{ p }}: Optional[{{ info.type }}]=None, {% endif %} + {% if not info.required and info.type == 'Any' %}{{ p }}: {{ info.type }}=None, {% endif %} {% endfor %} -params=None, -headers=None +params: Any=None, +headers: Any=None, \ No newline at end of file diff --git a/utils/templates/func_params_pyi b/utils/templates/func_params_pyi deleted file mode 100644 index cd48f9a6..00000000 --- a/utils/templates/func_params_pyi +++ /dev/null @@ -1,26 +0,0 @@ -{% for p, info in api.all_parts.items() %} - {% if info.required %}{{ p }}: {{ info.type }}, {% endif %} -{% endfor %} - -*, - -{% if api.body %} - body{% if not api.body.required %}: Optional[Any]=...{% else %}: Any{% endif %}, -{% endif %} - -{% for p, info in api.all_parts.items() %} - {% if not info.required %}{{ p }}: Optional[{{ info.type }}]=..., {% endif %} -{% endfor %} - -{% for p in api.query_params %} - {{ p }}: Optional[Any]=..., -{% endfor %} - -{% for p, p_type in global_query_params.items() %} - {% if p not in api.all_func_params %} - {{ p }}: {{ p_type }}=..., - {% endif %} -{% endfor %} - -params: Optional[MutableMapping[str, Any]]=..., -headers: Optional[MutableMapping[str, str]]=..., From 3de1a8f1a8d87836ecfe4be7e98e611354d09ea3 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 9 Nov 2023 10:51:20 -0500 Subject: [PATCH 42/80] Expanded type coverage to benchmarks, samples and tests. (#566) * Renamed json samples to fix duplicate module name. Signed-off-by: dblock * Enabled mypy on all source files. Signed-off-by: dblock * Added missing types. Signed-off-by: dblock * Added CHANGELOG. Signed-off-by: dblock * Move type: ignore to fix untyped decorator makes function untyped. Signed-off-by: dblock * Fix nox -rs lint-3.7. Signed-off-by: dblock * Fixed incorrect import. Signed-off-by: dblock * Fix broken test. Signed-off-by: dblock * Fixed TestBulk::test_bulk_works_with_bytestring_body. 
Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + benchmarks/bench_async.py | 5 +- benchmarks/bench_info_sync.py | 7 +- benchmarks/bench_sync.py | 7 +- benchmarks/thread_with_return_value.py | 19 ++- docs/source/conf.py | 34 +++-- noxfile.py | 27 ++-- opensearchpy/__init__.py | 1 + opensearchpy/_async/helpers/document.py | 14 +- opensearchpy/_async/helpers/index.py | 2 +- opensearchpy/_async/http_aiohttp.py | 2 +- opensearchpy/client/utils.py | 10 +- opensearchpy/connection/async_connections.py | 2 + opensearchpy/connection_pool.py | 6 +- opensearchpy/helpers/actions.py | 10 +- opensearchpy/helpers/asyncsigner.py | 16 +- opensearchpy/helpers/field.py | 4 +- opensearchpy/helpers/index.py | 2 +- opensearchpy/helpers/query.py | 8 +- opensearchpy/helpers/search.py | 3 + opensearchpy/helpers/test.py | 16 +- opensearchpy/helpers/utils.py | 4 +- opensearchpy/transport.py | 2 +- samples/bulk/bulk-array.py | 3 +- samples/hello/hello-async.py | 2 +- .../{hello-async.py => json-hello-async.py} | 2 +- samples/json/{hello.py => json-hello.py} | 0 samples/knn/knn-async-basics.py | 2 +- test_opensearchpy/TestHttpServer.py | 4 +- test_opensearchpy/run_tests.py | 5 +- .../test_async/test_connection.py | 42 ++--- .../test_async/test_helpers/conftest.py | 20 +-- .../test_async/test_helpers/test_document.py | 121 ++++++++------- .../test_helpers/test_faceted_search.py | 11 +- .../test_async/test_helpers/test_index.py | 11 +- .../test_async/test_helpers/test_mapping.py | 6 +- .../test_async/test_helpers/test_search.py | 25 +-- .../test_helpers/test_update_by_query.py | 8 +- .../test_async/test_http_connection.py | 16 +- .../test_async/test_plugins_client.py | 3 +- .../test_async/test_server/__init__.py | 4 +- .../test_async/test_server/conftest.py | 7 +- .../test_async/test_server/test_clients.py | 10 +- .../test_server/test_helpers/conftest.py | 27 ++-- .../test_server/test_helpers/test_actions.py | 135 +++++++++-------- .../test_server/test_helpers/test_data.py | 8 +- .../test_server/test_helpers/test_document.py | 91 ++++++----- .../test_helpers/test_faceted_search.py | 33 ++-- .../test_server/test_helpers/test_index.py | 20 ++- .../test_server/test_helpers/test_mapping.py | 10 +- .../test_server/test_helpers/test_search.py | 22 +-- .../test_helpers/test_update_by_query.py | 14 +- .../test_server/test_plugins/test_alerting.py | 4 +- .../test_server/test_rest_api_spec.py | 33 ++-- .../test_security_plugin.py | 6 +- test_opensearchpy/test_async/test_signer.py | 12 +- .../test_async/test_transport.py | 130 ++++++++-------- test_opensearchpy/test_cases.py | 32 ++-- .../test_plugins/test_plugins_client.py | 3 +- test_opensearchpy/test_client/test_utils.py | 8 +- .../test_connection/test_base_connection.py | 4 +- .../test_requests_http_connection.py | 39 +++-- .../test_urllib3_http_connection.py | 20 ++- test_opensearchpy/test_connection_pool.py | 7 +- test_opensearchpy/test_helpers/conftest.py | 20 +-- .../test_helpers/test_actions.py | 25 +-- test_opensearchpy/test_helpers/test_aggs.py | 26 ++-- .../test_helpers/test_analysis.py | 12 +- .../test_helpers/test_document.py | 143 ++++++++++-------- .../test_helpers/test_faceted_search.py | 11 +- test_opensearchpy/test_helpers/test_field.py | 15 +- test_opensearchpy/test_helpers/test_index.py | 37 ++--- .../test_helpers/test_mapping.py | 6 +- test_opensearchpy/test_helpers/test_query.py | 14 +- test_opensearchpy/test_helpers/test_result.py | 41 ++--- test_opensearchpy/test_helpers/test_search.py | 77 +++++----- 
.../test_helpers/test_update_by_query.py | 11 +- test_opensearchpy/test_helpers/test_utils.py | 4 +- .../test_helpers/test_validation.py | 45 +++--- .../test_helpers/test_wrappers.py | 21 +-- test_opensearchpy/test_serializer.py | 3 +- test_opensearchpy/test_server/__init__.py | 5 +- test_opensearchpy/test_server/conftest.py | 13 +- .../test_server/test_helpers/conftest.py | 28 ++-- .../test_server/test_helpers/test_actions.py | 58 +++---- .../test_server/test_helpers/test_analysis.py | 8 +- .../test_server/test_helpers/test_count.py | 8 +- .../test_server/test_helpers/test_data.py | 8 +- .../test_server/test_helpers/test_document.py | 81 +++++----- .../test_helpers/test_faceted_search.py | 33 ++-- .../test_server/test_helpers/test_index.py | 20 +-- .../test_server/test_helpers/test_mapping.py | 10 +- .../test_server/test_helpers/test_search.py | 22 +-- .../test_helpers/test_update_by_query.py | 8 +- .../test_server/test_plugins/test_alerting.py | 4 +- .../test_server/test_rest_api_spec.py | 82 +++++----- .../test_security_plugin.py | 2 +- test_opensearchpy/test_transport.py | 83 +++++----- test_opensearchpy/utils.py | 29 ++-- utils/build-dists.py | 15 +- utils/generate-api.py | 63 ++++---- 101 files changed, 1234 insertions(+), 1019 deletions(-) rename samples/json/{hello-async.py => json-hello-async.py} (98%) rename samples/json/{hello.py => json-hello.py} (100%) diff --git a/CHANGELOG.md b/CHANGELOG.md index b4bc0a02..96ce97dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) - Use API generator for all APIs ([#551](https://github.com/opensearch-project/opensearch-py/pull/551)) - Merge `.pyi` type stubs inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) +- Expanded type coverage to benchmarks, samples and tests ([#566](https://github.com/opensearch-project/opensearch-py/pull/566)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index a27a126c..baeb7d80 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -12,6 +12,7 @@ import asyncio import uuid +from typing import Any from opensearchpy import AsyncHttpConnection, AsyncOpenSearch @@ -22,7 +23,7 @@ item_count = 100 -async def index_records(client, item_count) -> None: +async def index_records(client: Any, item_count: int) -> None: await asyncio.gather( *[ client.index( @@ -39,7 +40,7 @@ async def index_records(client, item_count) -> None: ) -async def test_async(client_count=1, item_count=1): +async def test_async(client_count: int = 1, item_count: int = 1) -> None: clients = [] for i in range(client_count): clients.append( diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index 29b289cd..0c69a102 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -14,6 +14,7 @@ import logging import sys import time +from typing import Any from thread_with_return_value import ThreadWithReturnValue @@ -36,8 +37,8 @@ root.addHandler(handler) -def get_info(client, request_count): - tt = 0 +def get_info(client: Any, request_count: int) -> float: + tt: float = 0 for n in 
range(request_count): start = time.time() * 1000 client.info() @@ -46,7 +47,7 @@ def get_info(client, request_count): return tt -def test(thread_count=1, request_count=1, client_count=1): +def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None: clients = [] for i in range(client_count): clients.append( diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index 83225ef9..004fa2e4 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -15,6 +15,7 @@ import sys import time import uuid +from typing import Any from thread_with_return_value import ThreadWithReturnValue @@ -37,10 +38,10 @@ root.addHandler(handler) -def index_records(client, item_count): +def index_records(client: Any, item_count: int) -> Any: tt = 0 for n in range(10): - data = [] + data: Any = [] for i in range(item_count): data.append( json.dumps({"index": {"_index": index_name, "_id": str(uuid.uuid4())}}) @@ -63,7 +64,7 @@ def index_records(client, item_count): return tt -def test(thread_count=1, item_count=1, client_count=1): +def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None: clients = [] for i in range(client_count): clients.append( diff --git a/benchmarks/thread_with_return_value.py b/benchmarks/thread_with_return_value.py index b6bc9c09..089c6fde 100644 --- a/benchmarks/thread_with_return_value.py +++ b/benchmarks/thread_with_return_value.py @@ -10,19 +10,30 @@ from threading import Thread +from typing import Any, Optional class ThreadWithReturnValue(Thread): + _target: Any + _args: Any + _kwargs: Any + def __init__( - self, group=None, target=None, name=None, args=(), kwargs={}, Verbose=None - ): + self, + group: Any = None, + target: Any = None, + name: Optional[str] = None, + args: Any = (), + kwargs: Any = {}, + Verbose: Optional[bool] = None, + ) -> None: Thread.__init__(self, group, target, name, args, kwargs) self._return = None - def run(self): + def run(self) -> None: if self._target is not None: self._return = self._target(*self._args, **self._kwargs) - def join(self, *args): + def join(self, *args: Any) -> Any: Thread.join(self, *args) return self._return diff --git a/docs/source/conf.py b/docs/source/conf.py index 133a2564..64ff3c52 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -26,9 +26,11 @@ # -- Project information ----------------------------------------------------- -project = "OpenSearch Python Client" -copyright = "OpenSearch Project Contributors" -author = "OpenSearch Project Contributors" +from typing import Any + +project: str = "OpenSearch Python Client" +copyright: str = "OpenSearch Project Contributors" +author: str = "OpenSearch Project Contributors" # -- General configuration --------------------------------------------------- @@ -36,7 +38,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [ +extensions: Any = [ "sphinx.ext.autodoc", "sphinx_rtd_theme", "sphinx.ext.viewcode", @@ -47,12 +49,12 @@ ] # Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] +templates_path: Any = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. 
-exclude_patterns = [] +exclude_patterns: Any = [] # -- Options for HTML output ------------------------------------------------- @@ -60,31 +62,31 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +html_theme: str = "sphinx_rtd_theme" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path: Any = ["_static"] # -- additional settings ------------------------------------------------- -intersphinx_mapping = { +intersphinx_mapping: Any = { "python": ("https://docs.python.org/3", None), } -html_logo = "imgs/OpenSearch.svg" +html_logo: str = "imgs/OpenSearch.svg" # These paths are either relative to html_static_path # or fully qualified paths (eg. https://...) -html_css_files = [ +html_css_files: Any = [ "css/custom.css", ] # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -html_show_sphinx = False +html_show_sphinx: bool = False # add github link -html_context = { +html_context: Any = { "display_github": True, "github_user": "opensearch-project", "github_repo": "opensearch-py", @@ -94,18 +96,18 @@ # -- autodoc config ------------------------------------------------- # This value controls how to represent typehints. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints -autodoc_typehints = "description" +autodoc_typehints: str = "description" # This value selects what content will be inserted into the main body of an autoclass directive. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autoclass_content -autoclass_content = "both" +autoclass_content: str = "both" # https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-add_module_names # add_module_names = False # The default options for autodoc directives. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_default_options -autodoc_default_options = { +autodoc_default_options: Any = { # If set, autodoc will generate document for the members of the target module, class or exception. # noqa: E501 # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#directive-option-automodule-members "members": True, diff --git a/noxfile.py b/noxfile.py index 3b97ad03..e9189cc9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -26,6 +26,8 @@ # under the License. 
+from typing import Any + import nox SOURCE_FILES = ( @@ -40,16 +42,16 @@ ) -@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) -def test(session) -> None: +@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) # type: ignore +def test(session: Any) -> None: session.install(".") session.install("-r", "dev-requirements.txt") session.run("python", "setup.py", "test") -@nox.session() -def format(session) -> None: +@nox.session() # type: ignore +def format(session: Any) -> None: session.install("black", "isort") session.run("isort", "--profile=black", *SOURCE_FILES) @@ -59,8 +61,8 @@ def format(session) -> None: lint(session) -@nox.session(python=["3.7"]) -def lint(session) -> None: +@nox.session(python=["3.7"]) # type: ignore +def lint(session: Any) -> None: session.install( "flake8", "black", @@ -70,6 +72,9 @@ def lint(session) -> None: "types-six", "types-simplejson", "types-python-dateutil", + "types-PyYAML", + "types-mock", + "types-pytz", ) session.run("isort", "--check", "--profile=black", *SOURCE_FILES) @@ -82,7 +87,7 @@ def lint(session) -> None: # Run mypy on the package and then the type examples separately for # the two different mypy use-cases, ourselves and our users. - session.run("mypy", "--strict", "opensearchpy/") + session.run("mypy", "--strict", *SOURCE_FILES) session.run("mypy", "--strict", "test_opensearchpy/test_types/sync_types.py") session.run("mypy", "--strict", "test_opensearchpy/test_types/async_types.py") @@ -93,8 +98,8 @@ def lint(session) -> None: session.run("mypy", "--strict", "test_opensearchpy/test_types/sync_types.py") -@nox.session() -def docs(session) -> None: +@nox.session() # type: ignore +def docs(session: Any) -> None: session.install(".") session.install( "-rdev-requirements.txt", "sphinx-rtd-theme", "sphinx-autodoc-typehints" @@ -102,8 +107,8 @@ def docs(session) -> None: session.run("python", "-m", "pip", "install", "sphinx-autodoc-typehints") -@nox.session() -def generate(session) -> None: +@nox.session() # type: ignore +def generate(session: Any) -> None: session.install("-rdev-requirements.txt") session.run("python", "utils/generate-api.py") format(session) diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index 3dcd7389..e9ef6485 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -256,4 +256,5 @@ "AsyncTransport", "AsyncOpenSearch", "AsyncHttpConnection", + "__versionstr__", ] diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index 25196e01..83349f7e 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -10,7 +10,7 @@ import collections.abc as collections_abc from fnmatch import fnmatch -from typing import Any, Optional, Sequence, Tuple, Type +from typing import Any, Optional, Tuple, Type from six import add_metaclass @@ -128,9 +128,7 @@ def __repr__(self) -> str: ) @classmethod - def search( - cls, using: Optional[AsyncOpenSearch] = None, index: Optional[str] = None - ) -> AsyncSearch: + def search(cls, using: Any = None, index: Any = None) -> AsyncSearch: """ Create an :class:`~opensearchpy.AsyncSearch` instance that will search over this ``Document``. 
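The loosened `search()` signature above reflects how `using` is actually consumed: it may be a registered connection alias or a client instance, hence the permissive `Any`. A short usage sketch under that assumption; the `Post` document class, index name, and connection URL are hypothetical:

```python
import asyncio

from opensearchpy import AsyncOpenSearch
from opensearchpy._async.helpers import document


class Post(document.AsyncDocument):
    class Index:
        name = "blog"


async def main() -> None:
    client = AsyncOpenSearch(hosts=["https://admin:admin@localhost:9200"])
    # `using` accepts an alias string or, as here, a client instance.
    s = Post.search(using=client, index="blog")
    print(s.to_dict())  # builds the query body; no network call is made
    await client.close()


asyncio.run(main())
```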
@@ -142,9 +140,9 @@ def search( @classmethod async def get( # type: ignore cls, - id: str, - using: Optional[AsyncOpenSearch] = None, - index: Optional[str] = None, + id: Any, + using: Any = None, + index: Any = None, **kwargs: Any, ) -> Any: """ @@ -189,7 +187,7 @@ async def exists( @classmethod async def mget( cls, - docs: Sequence[str], + docs: Any, using: Optional[AsyncOpenSearch] = None, index: Optional[str] = None, raise_on_error: Optional[bool] = True, diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index ea06f316..4f2a9918 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -59,7 +59,7 @@ async def save(self, using: Any = None) -> Any: class AsyncIndex(object): - def __init__(self, name: Any, using: str = "default") -> None: + def __init__(self, name: Any, using: Any = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index 34819970..3c7010ed 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -69,7 +69,7 @@ async def close(self) -> None: class AIOHttpConnection(AsyncConnection): - session: Optional[aiohttp.ClientSession] + session: aiohttp.ClientSession ssl_assert_fingerprint: Optional[str] def __init__( diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index 3ae204e6..0663fd1d 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -32,7 +32,7 @@ import weakref from datetime import date, datetime from functools import wraps -from typing import Any, Callable +from typing import Any, Callable, Optional from opensearchpy.serializer import Serializer @@ -185,17 +185,17 @@ def _wrapped(*args: Any, **kwargs: Any) -> Any: return _wrapper -def _bulk_body(serializer: Serializer, body: str) -> str: +def _bulk_body(serializer: Optional[Serializer], body: Any) -> Any: # if not passed in a string, serialize items and join by newline if not isinstance(body, string_types): - body = "\n".join(map(serializer.dumps, body)) + body = "\n".join(map(serializer.dumps, body)) # type: ignore # bulk body must end with a newline if isinstance(body, bytes): if not body.endswith(b"\n"): body += b"\n" - elif isinstance(body, string_types) and not body.endswith("\n"): - body += "\n" + elif isinstance(body, string_types) and not body.endswith("\n"): # type: ignore + body += "\n" # type: ignore return body diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index 87467ae0..670bbaeb 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -18,6 +18,8 @@ class AsyncConnections(object): + _conns: Any + """ Class responsible for holding connections to different clusters. Used as a singleton in this module. diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py index defef6f5..378b91b3 100644 --- a/opensearchpy/connection_pool.py +++ b/opensearchpy/connection_pool.py @@ -124,7 +124,7 @@ class ConnectionPool(object): connections: Any orig_connections: Tuple[Connection, ...] 
dead: Any - dead_count: Dict[Connection, int] + dead_count: Dict[Any, int] dead_timeout: float timeout_cutoff: int selector: Any @@ -173,7 +173,7 @@ def __init__( self.selector = selector_class(dict(connections)) # type: ignore - def mark_dead(self, connection: Connection, now: Optional[float] = None) -> None: + def mark_dead(self, connection: Any, now: Optional[float] = None) -> None: """ Mark the connection as dead (failed). Remove it from the live pool and put it on a timeout. @@ -203,7 +203,7 @@ def mark_dead(self, connection: Connection, now: Optional[float] = None) -> None timeout, ) - def mark_live(self, connection: Connection) -> None: + def mark_live(self, connection: Any) -> None: """ Mark connection as healthy after a resurrection. Resets the fail counter for the connection. diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index 39e3cdaf..7f8ced35 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -503,12 +503,12 @@ def _setup_queues(self) -> None: def scan( client: Any, query: Any = None, - scroll: str = "5m", - raise_on_error: bool = True, - preserve_order: bool = False, - size: int = 1000, + scroll: Optional[str] = "5m", + raise_on_error: Optional[bool] = True, + preserve_order: Optional[bool] = False, + size: Optional[int] = 1000, request_timeout: Optional[float] = None, - clear_scroll: bool = True, + clear_scroll: Optional[bool] = True, scroll_kwargs: Any = None, **kwargs: Any ) -> Any: diff --git a/opensearchpy/helpers/asyncsigner.py b/opensearchpy/helpers/asyncsigner.py index bd84e09e..8dee4fee 100644 --- a/opensearchpy/helpers/asyncsigner.py +++ b/opensearchpy/helpers/asyncsigner.py @@ -8,7 +8,7 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from typing import Dict, Union +from typing import Any, Dict, Optional, Union class AWSV4SignerAsyncAuth: @@ -16,7 +16,7 @@ class AWSV4SignerAsyncAuth: AWS V4 Request Signer for Async Requests. """ - def __init__(self, credentials, region: str, service: str = "es") -> None: # type: ignore + def __init__(self, credentials: Any, region: str, service: str = "es") -> None: if not credentials: raise ValueError("Credentials cannot be empty") self.credentials = credentials @@ -30,12 +30,20 @@ def __init__(self, credentials, region: str, service: str = "es") -> None: # ty self.service = service def __call__( - self, method: str, url: str, query_string: str, body: Union[str, bytes] + self, + method: str, + url: str, + query_string: Optional[str] = None, + body: Optional[Union[str, bytes]] = None, ) -> Dict[str, str]: return self._sign_request(method, url, query_string, body) def _sign_request( - self, method: str, url: str, query_string: str, body: Union[str, bytes] + self, + method: str, + url: str, + query_string: Optional[str], + body: Optional[Union[str, bytes]], ) -> Dict[str, str]: """ This method helps in signing the request by injecting the required headers. 
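The widened `AWSV4SignerAsyncAuth.__call__` above makes `query_string` and `body` optional rather than required positionals. A sketch of both call shapes, assuming `botocore` is installed; the credentials object, region, and endpoint are placeholders, not working values:

```python
from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth


class FakeCredentials:
    # Attribute shape expected by botocore's SigV4Auth; placeholders only.
    access_key = "AKIAEXAMPLE"
    secret_key = "example-secret"
    token = None


auth = AWSV4SignerAsyncAuth(FakeCredentials(), region="us-east-1", service="es")

# A bare GET can now be signed without passing query_string/body...
headers = auth("GET", "https://search-domain.us-east-1.es.amazonaws.com/_search")

# ...while the fully explicit form still works.
headers_with_body = auth(
    "POST",
    "https://search-domain.us-east-1.es.amazonaws.com/_search",
    query_string="size=1",
    body='{"query":{"match_all":{}}}',
)
print(sorted(headers))
```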
diff --git a/opensearchpy/helpers/field.py b/opensearchpy/helpers/field.py index 4881e819..4ffd21d8 100644 --- a/opensearchpy/helpers/field.py +++ b/opensearchpy/helpers/field.py @@ -268,9 +268,7 @@ class Date(Field): name: Optional[str] = "date" _coerce: bool = True - def __init__( - self, default_timezone: None = None, *args: Any, **kwargs: Any - ) -> None: + def __init__(self, default_timezone: Any = None, *args: Any, **kwargs: Any) -> None: """ :arg default_timezone: timezone that will be automatically used for tz-naive values May be instance of `datetime.tzinfo` or string containing TZ offset diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index e96136b2..3fbb475a 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -78,7 +78,7 @@ def save(self, using: Any = None) -> Any: class Index(object): - def __init__(self, name: Any, using: str = "default") -> None: + def __init__(self, name: Any, using: Any = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index dc2db8a7..e299f94a 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -31,12 +31,11 @@ # 'SF' looks unused but the test suite assumes it's available # from this module so others are liable to do so as well. -from ..helpers.function import SF # noqa: F401 -from ..helpers.function import ScoreFunction +from ..helpers.function import SF, ScoreFunction from .utils import DslBase -def Q(name_or_query: str = "match_all", **params: Any) -> Any: +def Q(name_or_query: Any = "match_all", **params: Any) -> Any: # {"match": {"title": "python"}} if isinstance(name_or_query, collections_abc.Mapping): if params: @@ -521,3 +520,6 @@ class ParentId(Query): class Wrapper(Query): name = "wrapper" + + +__all__ = ["SF"] diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py index 46ba9da9..069f4c89 100644 --- a/opensearchpy/helpers/search.py +++ b/opensearchpy/helpers/search.py @@ -864,3 +864,6 @@ def execute(self, ignore_cache: Any = False, raise_on_error: Any = True) -> Any: self._response = out return self._response + + +__all__ = ["Q"] diff --git a/opensearchpy/helpers/test.py b/opensearchpy/helpers/test.py index 96282c49..bda16b2e 100644 --- a/opensearchpy/helpers/test.py +++ b/opensearchpy/helpers/test.py @@ -26,11 +26,9 @@ # under the License. 
-# type: ignore - import os import time -from typing import Any, Tuple +from typing import Any from unittest import SkipTest, TestCase import opensearchpy.client @@ -52,7 +50,7 @@ def get_test_client(nowait: bool = False, **kwargs: Any) -> OpenSearch: ) kw.update(kwargs) - client = OpenSearch(OPENSEARCH_URL, **kw) + client = OpenSearch(OPENSEARCH_URL, **kw) # type: ignore # wait for yellow status for _ in range(1 if nowait else 100): @@ -67,6 +65,8 @@ def get_test_client(nowait: bool = False, **kwargs: Any) -> OpenSearch: class OpenSearchTestCase(TestCase): + client: Any + @staticmethod def _get_client() -> OpenSearch: return get_test_client() @@ -86,20 +86,20 @@ def teardown_method(self, _: Any) -> None: ) self.client.indices.delete_template(name="*", ignore=404) - def opensearch_version(self) -> Tuple[int, ...]: + def opensearch_version(self) -> Any: if not hasattr(self, "_opensearch_version"): self._opensearch_version = opensearch_version(self.client) return self._opensearch_version -def _get_version(version_string: str) -> Tuple[int, ...]: +def _get_version(version_string: str) -> Any: if "." not in version_string: return () version = version_string.strip().split(".") return tuple(int(v) if v.isdigit() else 999 for v in version) -def opensearch_version(client: opensearchpy.client.OpenSearch) -> Tuple[int, int, int]: +def opensearch_version(client: opensearchpy.client.OpenSearch) -> Any: return _get_version(client.info()["version"]["number"]) @@ -111,3 +111,5 @@ def opensearch_version(client: opensearchpy.client.OpenSearch) -> Tuple[int, int verify_certs=False, ) OPENSEARCH_VERSION = opensearch_version(client) + +__all__ = ["OpenSearchTestCase"] diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index e17b89a6..2a9f19da 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -284,7 +284,7 @@ def get_dsl_class(cls: Any, name: Any, default: Optional[bool] = None) -> Any: "DSL class `{}` does not exist in {}.".format(name, cls._type_name) ) - def __init__(self, _expand__to_dot: bool = EXPAND__TO_DOT, **params: Any) -> None: + def __init__(self, _expand__to_dot: Any = EXPAND__TO_DOT, **params: Any) -> None: self._params = {} for pname, pvalue in iteritems(params): if "__" in pname and _expand__to_dot: @@ -438,6 +438,8 @@ def __init__( class ObjectBase(AttrDict): + _doc_type: Any + def __init__(self, meta: Any = None, **kwargs: Any) -> None: meta = meta or {} for k in list(kwargs): diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index 583d9ba7..44962542 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -373,7 +373,7 @@ def perform_request( method: str, url: str, params: Optional[Mapping[str, Any]] = None, - body: Optional[bytes] = None, + body: Any = None, timeout: Optional[Union[int, float]] = None, ignore: Collection[int] = (), headers: Optional[Mapping[str, str]] = None, diff --git a/samples/bulk/bulk-array.py b/samples/bulk/bulk-array.py index 1859d541..5191a291 100755 --- a/samples/bulk/bulk-array.py +++ b/samples/bulk/bulk-array.py @@ -12,6 +12,7 @@ import os +from typing import Any from opensearchpy import OpenSearch @@ -45,7 +46,7 @@ ) # index data -data = [] +data: Any = [] for i in range(100): data.append({"index": {"_index": index_name, "_id": i}}) data.append({"value": i}) diff --git a/samples/hello/hello-async.py b/samples/hello/hello-async.py index 9975f575..8606a17d 100755 --- a/samples/hello/hello-async.py +++ b/samples/hello/hello-async.py @@ -16,7 +16,7 @@ from opensearchpy import 
AsyncOpenSearch -async def main(): +async def main() -> None: # connect to OpenSearch host = "localhost" port = 9200 diff --git a/samples/json/hello-async.py b/samples/json/json-hello-async.py similarity index 98% rename from samples/json/hello-async.py rename to samples/json/json-hello-async.py index b9105d35..fbadece6 100755 --- a/samples/json/hello-async.py +++ b/samples/json/json-hello-async.py @@ -16,7 +16,7 @@ from opensearchpy import AsyncOpenSearch -async def main(): +async def main() -> None: # connect to OpenSearch host = "localhost" port = 9200 diff --git a/samples/json/hello.py b/samples/json/json-hello.py similarity index 100% rename from samples/json/hello.py rename to samples/json/json-hello.py diff --git a/samples/knn/knn-async-basics.py b/samples/knn/knn-async-basics.py index a7bb9d2f..aa0acf6e 100755 --- a/samples/knn/knn-async-basics.py +++ b/samples/knn/knn-async-basics.py @@ -18,7 +18,7 @@ from opensearchpy import AsyncHttpConnection, AsyncOpenSearch, helpers -async def main(): +async def main() -> None: # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") port = int(os.getenv("PORT", 9200)) diff --git a/test_opensearchpy/TestHttpServer.py b/test_opensearchpy/TestHttpServer.py index ba83e041..3d8b31fb 100644 --- a/test_opensearchpy/TestHttpServer.py +++ b/test_opensearchpy/TestHttpServer.py @@ -11,10 +11,11 @@ import json import threading from http.server import BaseHTTPRequestHandler, HTTPServer +from typing import Any class TestHTTPRequestHandler(BaseHTTPRequestHandler): - def do_GET(self): + def do_GET(self) -> None: headers = self.headers if self.path == "/redirect": @@ -40,6 +41,7 @@ def do_GET(self): class TestHTTPServer(HTTPServer): __test__ = False + _server_thread: Any def __init__(self, host: str = "localhost", port: int = 8080) -> None: super().__init__((host, port), TestHTTPRequestHandler) diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index de93adc7..b37fd598 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -37,6 +37,7 @@ import sys from os import environ from os.path import abspath, dirname, exists, join, pardir +from typing import Any def fetch_opensearch_repo() -> None: @@ -88,8 +89,8 @@ def fetch_opensearch_repo() -> None: subprocess.check_call("cd %s && git fetch origin %s" % (repo_path, sha), shell=True) -def run_all(argv: None = None) -> None: - sys.exitfunc = lambda: sys.stderr.write("Shutting down....\n") +def run_all(argv: Any = None) -> None: + sys.exitfunc = lambda: sys.stderr.write("Shutting down....\n") # type: ignore # fetch yaml tests anywhere that's not GitHub Actions if "GITHUB_ACTION" not in environ: fetch_opensearch_repo() diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index e72a2358..9413d0e8 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -32,6 +32,7 @@ import ssl import warnings from platform import python_version +from typing import Any import aiohttp import pytest @@ -52,29 +53,29 @@ class TestAIOHttpConnection: async def _get_mock_connection( self, - connection_params={}, + connection_params: Any = {}, response_code: int = 200, response_body: bytes = b"{}", - response_headers={}, - ): + response_headers: Any = {}, + ) -> Any: con = AIOHttpConnection(**connection_params) await con._create_aiohttp_session() - def _dummy_request(*args, **kwargs): + def _dummy_request(*args: Any, **kwargs: Any) -> Any: class 
DummyResponse: - async def __aenter__(self, *_, **__): + async def __aenter__(self, *_: Any, **__: Any) -> Any: return self - async def __aexit__(self, *_, **__): + async def __aexit__(self, *_: Any, **__: Any) -> None: pass - async def text(self): + async def text(self) -> Any: return response_body.decode("utf-8", "surrogatepass") - dummy_response = DummyResponse() + dummy_response: Any = DummyResponse() dummy_response.headers = CIMultiDict(**response_headers) dummy_response.status = response_code - _dummy_request.call_args = (args, kwargs) + _dummy_request.call_args = (args, kwargs) # type: ignore return dummy_response con.session.request = _dummy_request @@ -231,6 +232,7 @@ async def test_no_warning_when_using_ssl_context(self) -> None: assert w == [], str([x.message for x in w]) async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: + kwargs: Any for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, @@ -253,26 +255,28 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> N ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_given_ca_certs(self, load_verify_locations, tmp_path) -> None: + async def test_uses_given_ca_certs( + self, load_verify_locations: Any, tmp_path: Any + ) -> None: path = tmp_path / "ca_certs.pem" path.touch() AIOHttpConnection(use_ssl=True, ca_certs=str(path)) load_verify_locations.assert_called_once_with(cafile=str(path)) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_default_ca_certs(self, load_verify_locations) -> None: + async def test_uses_default_ca_certs(self, load_verify_locations: Any) -> None: AIOHttpConnection(use_ssl=True) load_verify_locations.assert_called_once_with( cafile=Connection.default_ca_certs() ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_no_ca_certs(self, load_verify_locations) -> None: + async def test_uses_no_ca_certs(self, load_verify_locations: Any) -> None: AIOHttpConnection(use_ssl=True, verify_certs=False) load_verify_locations.assert_not_called() async def test_trust_env(self) -> None: - con = AIOHttpConnection(trust_env=True) + con: Any = AIOHttpConnection(trust_env=True) await con._create_aiohttp_session() assert con._trust_env is True @@ -286,7 +290,7 @@ async def test_trust_env_default_value_is_false(self) -> None: assert con.session.trust_env is False @patch("opensearchpy.connection.base.logger") - async def test_uncompressed_body_logged(self, logger) -> None: + async def test_uncompressed_body_logged(self, logger: Any) -> None: con = await self._get_mock_connection(connection_params={"http_compress": True}) await con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -302,11 +306,11 @@ async def test_surrogatepass_into_bytes(self) -> None: status, headers, data = await con.perform_request("GET", "/") assert u"你好\uda6a" == data # fmt: skip - @pytest.mark.parametrize("exception_cls", reraise_exceptions) - async def test_recursion_error_reraised(self, exception_cls) -> None: + @pytest.mark.parametrize("exception_cls", reraise_exceptions) # type: ignore + async def test_recursion_error_reraised(self, exception_cls: Any) -> None: conn = AIOHttpConnection() - def request_raise(*_, **__): + def request_raise(*_: Any, **__: Any) -> Any: raise exception_cls("Wasn't modified!") await conn._create_aiohttp_session() @@ -334,6 +338,8 @@ async def test_json_errors_are_parsed(self) -> None: class TestConnectionHttpServer: """Tests the HTTP connection implementations against a live server E2E""" + 
server: Any + @classmethod def setup_class(cls) -> None: # Start server @@ -345,7 +351,7 @@ def teardown_class(cls) -> None: # Stop server cls.server.stop() - async def httpserver(self, conn, **kwargs): + async def httpserver(self, conn: Any, **kwargs: Any) -> Any: status, headers, data = await conn.perform_request("GET", "/", **kwargs) data = json.loads(data) return (status, data) diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py index f24b8a48..bd1776ab 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -9,6 +9,8 @@ # GitHub history for details. +from typing import Any + import pytest from _pytest.mark.structures import MarkDecorator from mock import Mock @@ -19,18 +21,18 @@ pytestmark: MarkDecorator = pytest.mark.asyncio -@fixture -async def mock_client(dummy_response): +@fixture # type: ignore +async def mock_client(dummy_response: Any) -> Any: client = Mock() client.search.return_value = dummy_response await add_connection("mock", client) yield client - async_connections._conn = {} + async_connections._conns = {} async_connections._kwargs = {} -@fixture -def dummy_response(): +@fixture # type: ignore +def dummy_response() -> Any: return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -78,8 +80,8 @@ def dummy_response(): } -@fixture -def aggs_search(): +@fixture # type: ignore +def aggs_search() -> Any: from opensearchpy._async.helpers.search import AsyncSearch s = AsyncSearch(index="flat-git") @@ -93,8 +95,8 @@ def aggs_search(): return s -@fixture -def aggs_data(): +@fixture # type: ignore +def aggs_data() -> Any: return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index d13c7272..d6ef0128 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -15,6 +15,7 @@ import pickle from datetime import datetime from hashlib import sha256 +from typing import Any import pytest from _pytest.mark.structures import MarkDecorator @@ -31,25 +32,25 @@ class MyInner(InnerDoc): - old_field = field.Text() + old_field: Any = field.Text() class MyDoc(document.AsyncDocument): - title = field.Keyword() - name = field.Text() - created_at = field.Date() - inner = field.Object(MyInner) + title: Any = field.Keyword() + name: Any = field.Text() + created_at: Any = field.Date() + inner: Any = field.Object(MyInner) class MySubDoc(MyDoc): - name = field.Keyword() + name: Any = field.Keyword() class Index: name = "default-index" class MyDoc2(document.AsyncDocument): - extra = field.Long() + extra: Any = field.Long() class MyMultiSubDoc(MyDoc2, MySubDoc): @@ -57,19 +58,19 @@ class MyMultiSubDoc(MyDoc2, MySubDoc): class Comment(InnerDoc): - title = field.Text() - tags = field.Keyword(multi=True) + title: Any = field.Text() + tags: Any = field.Keyword(multi=True) class DocWithNested(document.AsyncDocument): - comments = field.Nested(Comment) + comments: Any = field.Nested(Comment) class Index: name = "test-doc-with-nested" class SimpleCommit(document.AsyncDocument): - files = field.Text(multi=True) + files: Any = field.Text(multi=True) class Index: name = "test-git" @@ -80,48 +81,54 @@ class Secret(str): class SecretField(field.CustomField): - builtin_type = "text" + builtin_type: Any = "text" - def _serialize(self, data): + def _serialize(self, 
data: Any) -> Any: return codecs.encode(data, "rot_13") - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, Secret): return data return Secret(codecs.decode(data, "rot_13")) class SecretDoc(document.AsyncDocument): - title = SecretField(index="no") + title: Any = SecretField(index="no") class Index: name = "test-secret-doc" class NestedSecret(document.AsyncDocument): - secrets = field.Nested(SecretDoc) + secrets: Any = field.Nested(SecretDoc) class Index: name = "test-nested-secret" + _index: Any + class OptionalObjectWithRequiredField(document.AsyncDocument): - comments = field.Nested(properties={"title": field.Keyword(required=True)}) + comments: Any = field.Nested(properties={"title": field.Keyword(required=True)}) class Index: name = "test-required" + _index: Any + class Host(document.AsyncDocument): - ip = field.Ip() + ip: Any = field.Ip() class Index: name = "test-host" + _index: Any + async def test_range_serializes_properly() -> None: class D(document.AsyncDocument): - lr = field.LongRange() + lr: Any = field.LongRange() d = D(lr=Range(lt=42)) assert 40 in d.lr @@ -200,7 +207,7 @@ async def test_assigning_attrlist_to_field() -> None: async def test_optional_inner_objects_are_not_validated_if_missing() -> None: - d = OptionalObjectWithRequiredField() + d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None @@ -253,13 +260,15 @@ async def test_null_value_for_object() -> None: assert d.inner is None -async def test_inherited_doc_types_can_override_index(): +async def test_inherited_doc_types_can_override_index() -> None: class MyDocDifferentIndex(MySubDoc): + _index: Any + class Index: - name = "not-default-index" - settings = {"number_of_replicas": 0} - aliases = {"a": {}} - analyzers = [analyzer("my_analizer", tokenizer="keyword")] + name: Any = "not-default-index" + settings: Any = {"number_of_replicas": 0} + aliases: Any = {"a": {}} + analyzers: Any = [analyzer("my_analizer", tokenizer="keyword")] assert MyDocDifferentIndex._index._name == "not-default-index" assert MyDocDifferentIndex()._get_index() == "not-default-index" @@ -285,7 +294,7 @@ class Index: } -async def test_to_dict_with_meta(): +async def test_to_dict_with_meta() -> None: d = MySubDoc(title="hello") d.meta.routing = "some-parent" @@ -296,7 +305,7 @@ async def test_to_dict_with_meta(): } == d.to_dict(True) -async def test_to_dict_with_meta_includes_custom_index(): +async def test_to_dict_with_meta_includes_custom_index() -> None: d = MySubDoc(title="hello") d.meta.index = "other-index" @@ -340,7 +349,7 @@ async def test_meta_is_accessible_even_on_empty_doc() -> None: d.meta -async def test_meta_field_mapping(): +async def test_meta_field_mapping() -> None: class User(document.AsyncDocument): username = field.Text() @@ -372,17 +381,17 @@ class Blog(document.AsyncDocument): async def test_docs_with_properties() -> None: class User(document.AsyncDocument): - pwd_hash = field.Text() + pwd_hash: Any = field.Text() - def check_password(self, pwd): + def check_password(self, pwd: Any) -> Any: return sha256(pwd).hexdigest() == self.pwd_hash @property - def password(self): + def password(self) -> Any: raise AttributeError("readonly") @password.setter - def password(self, pwd): + def password(self, pwd: Any) -> None: self.pwd_hash = sha256(pwd).hexdigest() u = User(pwd_hash=sha256(b"secret").hexdigest()) @@ -424,8 +433,8 @@ async def test_nested_defaults_to_list_and_can_be_updated() -> None: assert {"comments": [{"title": "hello World!"}]} == md.to_dict() -async 
def test_to_dict_is_recursive_and_can_cope_with_multi_values(): - md = MyDoc(name=["a", "b", "c"]) +async def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: + md: Any = MyDoc(name=["a", "b", "c"]) md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")] assert isinstance(md.inner[0], MyInner) @@ -437,12 +446,12 @@ async def test_to_dict_is_recursive_and_can_cope_with_multi_values(): async def test_to_dict_ignores_empty_collections() -> None: - md = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) + md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() -async def test_declarative_mapping_definition(): +async def test_declarative_mapping_definition() -> None: assert issubclass(MyDoc, document.AsyncDocument) assert hasattr(MyDoc, "_doc_type") assert { @@ -455,7 +464,7 @@ async def test_declarative_mapping_definition(): } == MyDoc._doc_type.mapping.to_dict() -async def test_you_can_supply_own_mapping_instance(): +async def test_you_can_supply_own_mapping_instance() -> None: class MyD(document.AsyncDocument): title = field.Text() @@ -469,9 +478,9 @@ class Meta: } == MyD._doc_type.mapping.to_dict() -async def test_document_can_be_created_dynamically(): +async def test_document_can_be_created_dynamically() -> None: n = datetime.now() - md = MyDoc(title="hello") + md: Any = MyDoc(title="hello") md.name = "My Fancy Document!" md.created_at = n @@ -491,13 +500,13 @@ async def test_document_can_be_created_dynamically(): async def test_invalid_date_will_raise_exception() -> None: - md = MyDoc() + md: Any = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): md.full_clean() -async def test_document_inheritance(): +async def test_document_inheritance() -> None: assert issubclass(MySubDoc, MyDoc) assert issubclass(MySubDoc, document.AsyncDocument) assert hasattr(MySubDoc, "_doc_type") @@ -511,7 +520,7 @@ async def test_document_inheritance(): } == MySubDoc._doc_type.mapping.to_dict() -async def test_child_class_can_override_parent(): +async def test_child_class_can_override_parent() -> None: class A(document.AsyncDocument): o = field.Object(dynamic=False, properties={"a": field.Text()}) @@ -530,7 +539,7 @@ class B(A): async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: - md = MySubDoc(meta={"id": 42}, name="My First doc!") + md: Any = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" assert md.meta.index == "my-index" @@ -539,7 +548,7 @@ async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: assert {"id": 42, "index": "my-index"} == md.meta.to_dict() -async def test_index_inheritance(): +async def test_index_inheritance() -> None: assert issubclass(MyMultiSubDoc, MySubDoc) assert issubclass(MyMultiSubDoc, MyDoc2) assert issubclass(MyMultiSubDoc, document.AsyncDocument) @@ -558,31 +567,31 @@ async def test_index_inheritance(): async def test_meta_fields_can_be_set_directly_in_init() -> None: p = object() - md = MyDoc(_id=p, title="Hello World!") + md: Any = MyDoc(_id=p, title="Hello World!") assert md.meta.id is p -async def test_save_no_index(mock_client) -> None: - md = MyDoc() +async def test_save_no_index(mock_client: Any) -> None: + md: Any = MyDoc() with raises(ValidationException): await md.save(using="mock") -async def test_delete_no_index(mock_client) -> None: - md = MyDoc() +async def test_delete_no_index(mock_client: Any) -> None: + md: Any = MyDoc() with 
raises(ValidationException): await md.delete(using="mock") async def test_update_no_fields() -> None: - md = MyDoc() + md: Any = MyDoc() with raises(IllegalOperation): await md.update() -async def test_search_with_custom_alias_and_index(mock_client) -> None: - search_object = MyDoc.search( +async def test_search_with_custom_alias_and_index(mock_client: Any) -> None: + search_object: Any = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) @@ -590,8 +599,8 @@ async def test_search_with_custom_alias_and_index(mock_client) -> None: assert search_object._index == ["custom_index1", "custom_index2"] -async def test_from_opensearch_respects_underscored_non_meta_fields(): - doc = { +async def test_from_opensearch_respects_underscored_non_meta_fields() -> None: + doc: Any = { "_index": "test-index", "_id": "opensearch", "_score": 12.0, @@ -614,11 +623,11 @@ class Index: assert c._tagline == "You know, for search" -async def test_nested_and_object_inner_doc(): +async def test_nested_and_object_inner_doc() -> None: class MySubDocWithNested(MyDoc): nested_inner = field.Nested(MyInner) - props = MySubDocWithNested._doc_type.mapping.to_dict()["properties"] + props: Any = MySubDocWithNested._doc_type.mapping.to_dict()["properties"] assert props == { "created_at": {"type": "date"}, "inner": {"properties": {"old_field": {"type": "text"}}, "type": "object"}, diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py index 58c936c0..c27bd3ea 100644 --- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py @@ -9,6 +9,7 @@ # GitHub history for details. from datetime import datetime +from typing import Any import pytest from _pytest.mark.structures import MarkDecorator @@ -55,7 +56,7 @@ async def test_query_is_created_properly() -> None: } == s.to_dict() -async def test_query_is_created_properly_with_sort_tuple(): +async def test_query_is_created_properly_with_sort_tuple() -> None: bs = BlogSearch("python search", sort=("category", "-title")) s = bs.build_search() @@ -79,7 +80,7 @@ async def test_query_is_created_properly_with_sort_tuple(): } == s.to_dict() -async def test_filter_is_applied_to_search_but_not_relevant_facet(): +async def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: bs = BlogSearch("python search", filters={"category": "opensearch"}) s = bs.build_search() @@ -102,7 +103,7 @@ async def test_filter_is_applied_to_search_but_not_relevant_facet(): } == s.to_dict() -async def test_filters_are_applied_to_search_ant_relevant_facets(): +async def test_filters_are_applied_to_search_ant_relevant_facets() -> None: bs = BlogSearch( "python search", filters={"category": "opensearch", "tags": ["python", "django"]}, @@ -142,7 +143,7 @@ async def test_date_histogram_facet_with_1970_01_01_date() -> None: assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore ["interval_type", "interval"], [ ("interval", "year"), @@ -169,7 +170,7 @@ async def test_date_histogram_facet_with_1970_01_01_date() -> None: ("fixed_interval", "1h"), ], ) -async def test_date_histogram_interval_types(interval_type, interval) -> None: +async def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None: dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { 
"date_histogram": { diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index 681b9cfe..e59d86ad 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -10,6 +10,7 @@ import string from random import choice +from typing import Any import pytest from _pytest.mark.structures import MarkDecorator @@ -118,7 +119,7 @@ async def test_registered_doc_type_included_in_search() -> None: async def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} index = AsyncIndex("i", using="alias") index.aliases(**alias_dict) @@ -128,7 +129,7 @@ async def test_aliases_add_to_object() -> None: async def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} index = AsyncIndex("i", using="alias") index.aliases(**alias_dict) @@ -136,7 +137,7 @@ async def test_aliases_returned_from_to_dict() -> None: assert index._aliases == index.to_dict()["aliases"] == alias_dict -async def test_analyzers_added_to_object(): +async def test_analyzers_added_to_object() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -152,7 +153,7 @@ async def test_analyzers_added_to_object(): } -async def test_analyzers_returned_from_to_dict(): +async def test_analyzers_returned_from_to_dict() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -173,7 +174,7 @@ async def test_conflicting_analyzer_raises_error() -> None: i.analyzer("my_analyzer", tokenizer="keyword", filter=["lowercase", "stop"]) -async def test_index_template_can_have_order(): +async def test_index_template_can_have_order() -> None: i = AsyncIndex("i-*") it = i.as_template("i", order=2) diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py index 6ae4c0b7..797c295f 100644 --- a/test_opensearchpy/test_async/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py @@ -24,7 +24,7 @@ async def test_mapping_can_has_fields() -> None: } == m.to_dict() -async def test_mapping_update_is_recursive(): +async def test_mapping_update_is_recursive() -> None: m1 = mapping.AsyncMapping() m1.field("title", "text") m1.field("author", "object") @@ -67,7 +67,7 @@ async def test_properties_can_iterate_over_all_the_fields() -> None: } -async def test_mapping_can_collect_all_analyzers_and_normalizers(): +async def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -140,7 +140,7 @@ async def test_mapping_can_collect_all_analyzers_and_normalizers(): assert json.loads(json.dumps(m.to_dict())) == m.to_dict() -async def test_mapping_can_collect_multiple_analyzers(): +async def test_mapping_can_collect_multiple_analyzers() -> None: a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py index 
c32a8c7c..1af617d7 100644 --- a/test_opensearchpy/test_async/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_search.py @@ -9,6 +9,7 @@ # GitHub history for details. from copy import deepcopy +from typing import Any import pytest from _pytest.mark.structures import MarkDecorator @@ -71,7 +72,7 @@ async def test_query_can_be_assigned_to() -> None: assert s.query._proxied is q -async def test_query_can_be_wrapped(): +async def test_query_can_be_wrapped() -> None: s = search.AsyncSearch().query("match", title="python") s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"}) @@ -142,7 +143,7 @@ async def test_aggs_allow_two_metric() -> None: } -async def test_aggs_get_copied_on_change(): +async def test_aggs_get_copied_on_change() -> None: s = search.AsyncSearch().query("match_all") s.aggs.bucket("per_tag", "terms", field="f").metric( "max_score", "max", field="score" @@ -155,7 +156,7 @@ async def test_aggs_get_copied_on_change(): s4 = s3._clone() s4.aggs.metric("max_score", "max", field="score") - d = { + d: Any = { "query": {"match_all": {}}, "aggs": { "per_tag": { @@ -218,7 +219,7 @@ class MyDocument(AsyncDocument): assert s._doc_type_map == {} -async def test_sort(): +async def test_sort() -> None: s = search.AsyncSearch() s = s.sort("fielda", "-fieldb") @@ -254,7 +255,7 @@ async def test_index() -> None: assert {"from": 3, "size": 1} == s[3].to_dict() -async def test_search_to_dict(): +async def test_search_to_dict() -> None: s = search.AsyncSearch() assert {} == s.to_dict() @@ -283,7 +284,7 @@ async def test_search_to_dict(): assert {"size": 5, "from": 42} == s.to_dict() -async def test_complex_example(): +async def test_complex_example() -> None: s = search.AsyncSearch() s = ( s.query("match", title="python") @@ -334,7 +335,7 @@ async def test_complex_example(): } == s.to_dict() -async def test_reverse(): +async def test_reverse() -> None: d = { "query": { "filtered": { @@ -406,7 +407,7 @@ async def test_source() -> None: ).source(["f1", "f2"]).to_dict() -async def test_source_on_clone(): +async def test_source_on_clone() -> None: assert { "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]}, "query": {"bool": {"filter": [{"term": {"title": "python"}}]}}, @@ -431,7 +432,7 @@ async def test_source_on_clear() -> None: ) -async def test_suggest_accepts_global_text(): +async def test_suggest_accepts_global_text() -> None: s = search.AsyncSearch.from_dict( { "suggest": { @@ -453,7 +454,7 @@ async def test_suggest_accepts_global_text(): } == s.to_dict() -async def test_suggest(): +async def test_suggest() -> None: s = search.AsyncSearch() s = s.suggest("my_suggestion", "pyhton", term={"field": "title"}) @@ -475,7 +476,7 @@ async def test_exclude() -> None: } == s.to_dict() -async def test_update_from_dict(): +async def test_update_from_dict() -> None: s = search.AsyncSearch() s.update_from_dict({"indices_boost": [{"important-documents": 2}]}) s.update_from_dict({"_source": ["id", "name"]}) @@ -486,7 +487,7 @@ async def test_update_from_dict(): } == s.to_dict() -async def test_rescore_query_to_dict(): +async def test_rescore_query_to_dict() -> None: s = search.AsyncSearch(index="index-name") positive_query = Q( diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py index b15983dc..52fc20c3 100644 --- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py +++ 
b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py @@ -26,7 +26,7 @@ async def test_ubq_starts_with_no_query() -> None: assert ubq.query._proxied is None -async def test_ubq_to_dict(): +async def test_ubq_to_dict() -> None: ubq = update_by_query.AsyncUpdateByQuery() assert {} == ubq.to_dict() @@ -44,7 +44,7 @@ async def test_ubq_to_dict(): assert {"extra_q": {"term": {"category": "conference"}}} == ubq.to_dict() -async def test_complex_example(): +async def test_complex_example() -> None: ubq = update_by_query.AsyncUpdateByQuery() ubq = ( ubq.query("match", title="python") @@ -95,7 +95,7 @@ async def test_exclude() -> None: } == ubq.to_dict() -async def test_reverse(): +async def test_reverse() -> None: d = { "query": { "filtered": { @@ -137,7 +137,7 @@ async def test_from_dict_doesnt_need_query() -> None: assert {"script": {"source": "test"}} == ubq.to_dict() -async def test_overwrite_script(): +async def test_overwrite_script() -> None: ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.script( source="ctx._source.likes += params.f", lang="painless", params={"f": 3} diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index 913a944d..febb231b 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -26,12 +26,14 @@ # under the License. +from typing import Any + import mock import pytest from _pytest.mark.structures import MarkDecorator from multidict import CIMultiDict -from opensearchpy._async._extra_imports import aiohttp +from opensearchpy._async._extra_imports import aiohttp # type: ignore from opensearchpy._async.compat import get_running_loop from opensearchpy.connection.http_async import AsyncHttpConnection @@ -52,15 +54,15 @@ def test_auth_as_string(self) -> None: assert c._http_auth.password, "password" def test_auth_as_callable(self) -> None: - def auth_fn(): + def auth_fn() -> None: pass c = AsyncHttpConnection(http_auth=auth_fn) assert callable(c._http_auth) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_basicauth_in_request_session(self, mock_request) -> None: - async def do_request(*args, **kwargs): + async def test_basicauth_in_request_session(self, mock_request: Any) -> None: + async def do_request(*args: Any, **kwargs: Any) -> Any: response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() response_mock.status = 200 @@ -90,13 +92,13 @@ async def do_request(*args, **kwargs): ) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_callable_in_request_session(self, mock_request) -> None: - def auth_fn(*args, **kwargs): + async def test_callable_in_request_session(self, mock_request: Any) -> None: + def auth_fn(*args: Any, **kwargs: Any) -> Any: return { "Test": "PASSED", } - async def do_request(*args, **kwargs): + async def do_request(*args: Any, **kwargs: Any) -> Any: response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() response_mock.status = 200 diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index 2364f0fa..d701892c 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -17,7 +17,8 @@ class TestPluginsClient(TestCase): async def test_plugins_client(self) -> None: with self.assertWarns(Warning) as w: client = AsyncOpenSearch() - client.plugins.__init__(client) # double-init + # 
testing double-init here + client.plugins.__init__(client) # type: ignore self.assertEqual( str(w.warnings[0].message), "Cannot load `alerting` directly to AsyncOpenSearch as it already exists. Use `AsyncOpenSearch.plugin.alerting` instead.", diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index 36571a71..3541fdec 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -26,7 +26,7 @@ # under the License. -from unittest import IsolatedAsyncioTestCase +from unittest import IsolatedAsyncioTestCase # type: ignore from opensearchpy._async.helpers.test import get_test_client from opensearchpy.connection.async_connections import add_connection @@ -34,7 +34,7 @@ from ...utils import wipe_cluster -class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): +class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): # type: ignore async def asyncSetUp(self) -> None: self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py index 908313ee..79952bc4 100644 --- a/test_opensearchpy/test_async/test_server/conftest.py +++ b/test_opensearchpy/test_async/test_server/conftest.py @@ -27,6 +27,7 @@ import asyncio +from typing import Any import pytest from _pytest.mark.structures import MarkDecorator @@ -39,15 +40,15 @@ pytestmark: MarkDecorator = pytest.mark.asyncio -@pytest.fixture(scope="function") -async def async_client(): +@pytest.fixture(scope="function") # type: ignore +async def async_client() -> Any: client = None try: if not hasattr(opensearchpy, "AsyncOpenSearch"): pytest.skip("test requires 'AsyncOpenSearch'") kw = {"timeout": 3} - client = opensearchpy.AsyncOpenSearch(OPENSEARCH_URL, **kw) + client = opensearchpy.AsyncOpenSearch(OPENSEARCH_URL, **kw) # type: ignore # wait for yellow status for _ in range(100): diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index 41a07012..323532c5 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -28,6 +28,8 @@ from __future__ import unicode_literals +from typing import Any + import pytest from _pytest.mark.structures import MarkDecorator @@ -35,19 +37,19 @@ class TestUnicode: - async def test_indices_analyze(self, async_client) -> None: + async def test_indices_analyze(self, async_client: Any) -> None: await async_client.indices.analyze(body='{"text": "привет"}') class TestBulk: - async def test_bulk_works_with_string_body(self, async_client) -> None: + async def test_bulk_works_with_string_body(self, async_client: Any) -> None: docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) assert response["errors"] is False assert len(response["items"]) == 1 - async def test_bulk_works_with_bytestring_body(self, async_client) -> None: + async def test_bulk_works_with_bytestring_body(self, async_client: Any) -> None: docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -57,7 +59,7 @@ async def test_bulk_works_with_bytestring_body(self, async_client) -> None: class TestYarlMissing: async def test_aiohttp_connection_works_without_yarl( - self, async_client, monkeypatch + 
self, async_client: Any, monkeypatch: Any ) -> None: # This is a defensive test case for if aiohttp suddenly stops using yarl. from opensearchpy._async import http_aiohttp diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 36ea7a10..69282ead 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -10,6 +10,7 @@ import re from datetime import datetime +from typing import Any import pytest from pytest import fixture @@ -34,32 +35,32 @@ pytestmark = pytest.mark.asyncio -@fixture(scope="function") -async def client(): +@fixture(scope="function") # type: ignore +async def client() -> Any: client = await get_test_client(verify_certs=False, http_auth=("admin", "admin")) await add_connection("default", client) return client -@fixture(scope="function") -async def opensearch_version(client): +@fixture(scope="function") # type: ignore +async def opensearch_version(client: Any) -> Any: info = await client.info() print(info) yield tuple( int(x) - for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") + for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") # type: ignore ) -@fixture -async def write_client(client): +@fixture # type: ignore +async def write_client(client: Any) -> Any: yield client await client.indices.delete("test-*", ignore=404) await client.indices.delete_template("test-template", ignore=404) -@fixture -async def data_client(client): +@fixture # type: ignore +async def data_client(client: Any) -> Any: # create mappings await create_git_index(client, "git") await create_flat_git_index(client, "flat-git") @@ -71,8 +72,8 @@ async def data_client(client): await client.indices.delete("flat-git", ignore=404) -@fixture -async def pull_request(write_client): +@fixture # type: ignore +async def pull_request(write_client: Any) -> Any: await PullRequest.init() pr = PullRequest( _id=42, @@ -95,8 +96,8 @@ async def pull_request(write_client): return pr -@fixture -async def setup_ubq_tests(client) -> str: +@fixture # type: ignore +async def setup_ubq_tests(client: Any) -> str: index = "test-git" await create_git_index(client, index) await async_bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index dee69819..3608d935 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -27,7 +27,7 @@ import asyncio -from typing import Tuple +from typing import Any, List import pytest from mock import MagicMock, patch @@ -40,19 +40,19 @@ class AsyncMock(MagicMock): - async def __call__(self, *args, **kwargs): + async def __call__(self, *args: Any, **kwargs: Any) -> Any: return super(AsyncMock, self).__call__(*args, **kwargs) - def __await__(self): + def __await__(self) -> Any: return self().__await__() class FailingBulkClient(object): def __init__( self, - client, - fail_at: Tuple[int] = (2,), - fail_with=TransportError(599, "Error!", {}), + client: Any, + fail_at: Any = (2,), + fail_with: TransportError = TransportError(599, "Error!", {}), ) -> None: self.client = client self._called = 0 @@ -60,7 +60,7 @@ def __init__( self.transport = client.transport self._fail_with = fail_with - async def 
bulk(self, *args, **kwargs): + async def bulk(self, *args: Any, **kwargs: Any) -> Any: self._called += 1 if self._called in self._fail_at: raise self._fail_with @@ -68,7 +68,7 @@ async def bulk(self, *args, **kwargs): class TestStreamingBulk(object): - async def test_actions_remain_unchanged(self, async_client) -> None: + async def test_actions_remain_unchanged(self, async_client: Any) -> None: actions1 = [{"_id": 1}, {"_id": 2}] async for ok, item in actions.async_streaming_bulk( async_client, actions1, index="test-index" @@ -76,7 +76,7 @@ async def test_actions_remain_unchanged(self, async_client) -> None: assert ok assert [{"_id": 1}, {"_id": 2}] == actions1 - async def test_all_documents_get_inserted(self, async_client) -> None: + async def test_all_documents_get_inserted(self, async_client: Any) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] async for ok, item in actions.async_streaming_bulk( async_client, docs, index="test-index", refresh=True @@ -88,13 +88,13 @@ async def test_all_documents_get_inserted(self, async_client) -> None: "_source" ] - async def test_documents_data_types(self, async_client): - async def async_gen(): + async def test_documents_data_types(self, async_client: Any) -> None: + async def async_gen() -> Any: for x in range(100): await asyncio.sleep(0) yield {"answer": x, "_id": x} - def sync_gen(): + def sync_gen() -> Any: for x in range(100): yield {"answer": x, "_id": x} @@ -123,7 +123,7 @@ def sync_gen(): ] async def test_all_errors_from_chunk_are_raised_on_failure( - self, async_client + self, async_client: Any ) -> None: await async_client.indices.create( "i", @@ -144,7 +144,7 @@ async def test_all_errors_from_chunk_are_raised_on_failure( else: assert False, "exception should have been raised" - async def test_different_op_types(self, async_client): + async def test_different_op_types(self, async_client: Any) -> None: await async_client.index(index="i", id=45, body={}) await async_client.index(index="i", id=42, body={}) docs = [ @@ -159,7 +159,7 @@ async def test_different_op_types(self, async_client): assert {"answer": 42} == (await async_client.get(index="i", id=42))["_source"] assert {"f": "v"} == (await async_client.get(index="i", id=47))["_source"] - async def test_transport_error_can_becaught(self, async_client): + async def test_transport_error_can_becaught(self, async_client: Any) -> None: failing_client = FailingBulkClient(async_client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -193,7 +193,7 @@ async def test_transport_error_can_becaught(self, async_client): } } == results[1][1] - async def test_rejected_documents_are_retried(self, async_client) -> None: + async def test_rejected_documents_are_retried(self, async_client: Any) -> None: failing_client = FailingBulkClient( async_client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -222,7 +222,7 @@ async def test_rejected_documents_are_retried(self, async_client) -> None: assert 4 == failing_client._called async def test_rejected_documents_are_retried_at_most_max_retries_times( - self, async_client + self, async_client: Any ) -> None: failing_client = FailingBulkClient( async_client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) @@ -253,7 +253,7 @@ async def test_rejected_documents_are_retried_at_most_max_retries_times( assert 4 == failing_client._called async def test_transport_error_is_raised_with_max_retries( - self, async_client + self, async_client: Any ) -> None: failing_client = FailingBulkClient( async_client, @@ -261,7 +261,7 @@ async def 
test_transport_error_is_raised_with_max_retries( fail_with=TransportError(429, "Rejected!", {}), ) - async def streaming_bulk(): + async def streaming_bulk() -> Any: results = [ x async for x in actions.async_streaming_bulk( @@ -280,7 +280,7 @@ async def streaming_bulk(): class TestBulk(object): - async def test_bulk_works_with_single_item(self, async_client) -> None: + async def test_bulk_works_with_single_item(self, async_client: Any) -> None: docs = [{"answer": 42, "_id": 1}] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -293,7 +293,7 @@ async def test_bulk_works_with_single_item(self, async_client) -> None: "_source" ] - async def test_all_documents_get_inserted(self, async_client) -> None: + async def test_all_documents_get_inserted(self, async_client: Any) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -306,7 +306,7 @@ async def test_all_documents_get_inserted(self, async_client) -> None: "_source" ] - async def test_stats_only_reports_numbers(self, async_client) -> None: + async def test_stats_only_reports_numbers(self, async_client: Any) -> None: docs = [{"answer": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True, stats_only=True @@ -316,7 +316,7 @@ async def test_stats_only_reports_numbers(self, async_client) -> None: assert 0 == failed assert 100 == (await async_client.count(index="test-index"))["count"] - async def test_errors_are_reported_correctly(self, async_client): + async def test_errors_are_reported_correctly(self, async_client: Any) -> None: await async_client.indices.create( "i", { @@ -333,6 +333,7 @@ async def test_errors_are_reported_correctly(self, async_client): raise_on_error=False, ) assert 1 == success + assert isinstance(failed, List) assert 1 == len(failed) error = failed[0] assert "42" == error["index"]["_id"] @@ -342,7 +343,7 @@ async def test_errors_are_reported_correctly(self, async_client): error["index"]["error"] ) or "mapper_parsing_exception" in repr(error["index"]["error"]) - async def test_error_is_raised(self, async_client): + async def test_error_is_raised(self, async_client: Any) -> None: await async_client.indices.create( "i", { @@ -355,7 +356,7 @@ async def test_error_is_raised(self, async_client): with pytest.raises(BulkIndexError): await actions.async_bulk(async_client, [{"a": 42}, {"a": "c"}], index="i") - async def test_ignore_error_if_raised(self, async_client): + async def test_ignore_error_if_raised(self, async_client: Any) -> None: # ignore the status code 400 in tuple await actions.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -388,7 +389,7 @@ async def test_ignore_error_if_raised(self, async_client): failing_client, [{"a": 42}], index="i", ignore_status=(599,) ) - async def test_errors_are_collected_properly(self, async_client): + async def test_errors_are_collected_properly(self, async_client: Any) -> None: await async_client.indices.create( "i", { @@ -410,10 +411,12 @@ async def test_errors_are_collected_properly(self, async_client): class MockScroll: + calls: Any + def __init__(self) -> None: self.calls = [] - async def __call__(self, *args, **kwargs): + async def __call__(self, *args: Any, **kwargs: Any) -> Any: self.calls.append((args, kwargs)) if len(self.calls) == 1: return { @@ -432,25 +435,27 @@ async def __call__(self, *args, **kwargs): class 
MockResponse: - def __init__(self, resp) -> None: + def __init__(self, resp: Any) -> None: self.resp = resp - async def __call__(self, *args, **kwargs): + async def __call__(self, *args: Any, **kwargs: Any) -> Any: return self.resp - def __await__(self): + def __await__(self) -> Any: return self().__await__() -@pytest.fixture(scope="function") -async def scan_teardown(async_client): +@pytest.fixture(scope="function") # type: ignore +async def scan_teardown(async_client: Any) -> Any: yield await async_client.clear_scroll(scroll_id="_all") class TestScan(object): - async def test_order_can_be_preserved(self, async_client, scan_teardown): - bulk = [] + async def test_order_can_be_preserved( + self, async_client: Any, scan_teardown: Any + ) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -470,8 +475,10 @@ async def test_order_can_be_preserved(self, async_client, scan_teardown): assert list(map(str, range(100))) == list(d["_id"] for d in docs) assert list(range(100)) == list(d["_source"]["answer"] for d in docs) - async def test_all_documents_are_read(self, async_client, scan_teardown): - bulk = [] + async def test_all_documents_are_read( + self, async_client: Any, scan_teardown: Any + ) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -486,8 +493,8 @@ async def test_all_documents_are_read(self, async_client, scan_teardown): assert set(map(str, range(100))) == set(d["_id"] for d in docs) assert set(range(100)) == set(d["_source"]["answer"] for d in docs) - async def test_scroll_error(self, async_client, scan_teardown): - bulk = [] + async def test_scroll_error(self, async_client: Any, scan_teardown: Any) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -522,7 +529,9 @@ async def test_scroll_error(self, async_client, scan_teardown): assert len(data) == 3 assert data[-1] == {"scroll_data": 42} - async def test_initial_search_error(self, async_client, scan_teardown): + async def test_initial_search_error( + self, async_client: Any, scan_teardown: Any + ) -> None: with patch.object(async_client, "clear_scroll", new_callable=AsyncMock): with patch.object( async_client, @@ -572,7 +581,9 @@ async def test_initial_search_error(self, async_client, scan_teardown): assert data == [{"search_data": 1}] assert mock_scroll.calls == [] - async def test_no_scroll_id_fast_route(self, async_client, scan_teardown) -> None: + async def test_no_scroll_id_fast_route( + self, async_client: Any, scan_teardown: Any + ) -> None: with patch.object(async_client, "search", MockResponse({"no": "_scroll_id"})): with patch.object(async_client, "scroll") as scroll_mock: with patch.object(async_client, "clear_scroll") as clear_mock: @@ -588,8 +599,10 @@ async def test_no_scroll_id_fast_route(self, async_client, scan_teardown) -> Non clear_mock.assert_not_called() @patch("opensearchpy._async.helpers.actions.logger") - async def test_logger(self, logger_mock, async_client, scan_teardown): - bulk = [] + async def test_logger( + self, logger_mock: Any, async_client: Any, scan_teardown: Any + ) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -629,8 +642,8 @@ async def test_logger(self, logger_mock, async_client, scan_teardown): 5, ) - async def test_clear_scroll(self, 
async_client, scan_teardown): - bulk = [] + async def test_clear_scroll(self, async_client: Any, scan_teardown: Any) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -665,7 +678,7 @@ async def test_clear_scroll(self, async_client, scan_teardown): ] spy.assert_not_called() - @pytest.mark.parametrize( + @pytest.mark.parametrize( # type: ignore "kwargs", [ {"api_key": ("name", "value")}, @@ -674,8 +687,8 @@ async def test_clear_scroll(self, async_client, scan_teardown): ], ) async def test_scan_auth_kwargs_forwarded( - self, async_client, scan_teardown, kwargs - ): + self, async_client: Any, scan_teardown: Any, kwargs: Any + ) -> None: ((key, val),) = kwargs.items() with patch.object( @@ -716,8 +729,8 @@ async def test_scan_auth_kwargs_forwarded( assert api_mock.call_args[1][key] == val async def test_scan_auth_kwargs_favor_scroll_kwargs_option( - self, async_client, scan_teardown - ): + self, async_client: Any, scan_teardown: Any + ) -> None: with patch.object( async_client, "search", @@ -765,9 +778,9 @@ async def test_scan_auth_kwargs_favor_scroll_kwargs_option( assert async_client.scroll.call_args[1]["sort"] == "asc" -@pytest.fixture(scope="function") -async def reindex_setup(async_client): - bulk = [] +@pytest.fixture(scope="function") # type: ignore +async def reindex_setup(async_client: Any) -> Any: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append( @@ -783,7 +796,7 @@ async def reindex_setup(async_client): class TestReindex(object): async def test_reindex_passes_kwargs_to_scan_and_bulk( - self, async_client, reindex_setup + self, async_client: Any, reindex_setup: Any ) -> None: await actions.async_reindex( async_client, @@ -803,7 +816,9 @@ async def test_reindex_passes_kwargs_to_scan_and_bulk( await async_client.get(index="prod_index", id=42) )["_source"] - async def test_reindex_accepts_a_query(self, async_client, reindex_setup) -> None: + async def test_reindex_accepts_a_query( + self, async_client: Any, reindex_setup: Any + ) -> None: await actions.async_reindex( async_client, "test_index", @@ -822,7 +837,9 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup) -> Non await async_client.get(index="prod_index", id=42) )["_source"] - async def test_all_documents_get_moved(self, async_client, reindex_setup) -> None: + async def test_all_documents_get_moved( + self, async_client: Any, reindex_setup: Any + ) -> None: await actions.async_reindex(async_client, "test_index", "prod_index") await async_client.indices.refresh() @@ -843,8 +860,8 @@ async def test_all_documents_get_moved(self, async_client, reindex_setup) -> Non )["_source"] -@pytest.fixture(scope="function") -async def parent_reindex_setup(async_client): +@pytest.fixture(scope="function") # type: ignore +async def parent_reindex_setup(async_client: Any) -> None: body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { @@ -873,8 +890,8 @@ async def parent_reindex_setup(async_client): class TestParentChildReindex: async def test_children_are_reindexed_correctly( - self, async_client, parent_reindex_setup - ): + self, async_client: Any, parent_reindex_setup: Any + ) -> None: await actions.async_reindex(async_client, "test-index", "real-index") assert {"question_answer": "question"} == ( await async_client.get(index="real-index", id=42) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py 
b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index 99f2486d..7a23b8b1 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -13,7 +13,7 @@ from typing import Any, Dict -async def create_flat_git_index(client, index): +async def create_flat_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -56,7 +56,7 @@ async def create_flat_git_index(client, index): ) -async def create_git_index(client, index): +async def create_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -1078,7 +1078,7 @@ async def create_git_index(client, index): ] -def flatten_doc(d) -> Dict[str, Any]: +def flatten_doc(d: Any) -> Dict[str, Any]: src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1087,7 +1087,7 @@ def flatten_doc(d) -> Dict[str, Any]: FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d] -def create_test_git_data(d) -> Dict[str, Any]: +def create_test_git_data(d: Any) -> Dict[str, Any]: src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 67982918..8e4e95e2 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -10,6 +10,7 @@ from datetime import datetime from ipaddress import ip_address +from typing import Any, Optional import pytest from pytest import raises @@ -63,7 +64,7 @@ class Repository(AsyncDocument): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Optional[str] = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -116,7 +117,7 @@ class Index: name = "test-serialization" -async def test_serialization(write_client): +async def test_serialization(write_client: Any) -> None: await SerializationDoc.init() await write_client.index( index="test-serialization", @@ -129,7 +130,7 @@ async def test_serialization(write_client): "ip": ["::1", "127.0.0.1", None], }, ) - sd = await SerializationDoc.get(id=42) + sd: Any = await SerializationDoc.get(id=42) assert sd.i == [1, 2, 3, None] assert sd.b == [True, False, True, False, None] @@ -146,7 +147,7 @@ async def test_serialization(write_client): } -async def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None: +async def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: history_query = Q( "nested", path="comments.history", @@ -174,7 +175,7 @@ async def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None: assert "score" in history.meta -async def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None: +async def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: s = PullRequest.search().query( "nested", inner_hits={}, @@ -189,7 +190,7 @@ async def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None assert isinstance(pr.comments[0].created_at, datetime) -async def test_nested_top_hits_are_wrapped_properly(pull_request) -> None: +async def 
test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -201,7 +202,7 @@ async def test_nested_top_hits_are_wrapped_properly(pull_request) -> None: assert isinstance(r.aggregations.comments.hits.hits[0], Comment) -async def test_update_object_field(write_client) -> None: +async def test_update_object_field(write_client: Any) -> None: await Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -221,7 +222,7 @@ async def test_update_object_field(write_client) -> None: assert w.ranked == {"test1": 0.1, "topic2": 0.2} -async def test_update_script(write_client) -> None: +async def test_update_script(write_client: Any) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -231,7 +232,7 @@ async def test_update_script(write_client) -> None: assert w.views == 47 -async def test_update_retry_on_conflict(write_client) -> None: +async def test_update_retry_on_conflict(write_client: Any) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -249,8 +250,10 @@ async def test_update_retry_on_conflict(write_client) -> None: assert w.views == 52 -@pytest.mark.parametrize("retry_on_conflict", [None, 0]) -async def test_update_conflicting_version(write_client, retry_on_conflict) -> None: +@pytest.mark.parametrize("retry_on_conflict", [None, 0]) # type: ignore +async def test_update_conflicting_version( + write_client: Any, retry_on_conflict: bool +) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -267,7 +270,7 @@ async def test_update_conflicting_version(write_client, retry_on_conflict) -> No ) -async def test_save_and_update_return_doc_meta(write_client) -> None: +async def test_save_and_update_return_doc_meta(write_client: Any) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = await w.save(return_doc_meta=True) @@ -291,33 +294,33 @@ async def test_save_and_update_return_doc_meta(write_client) -> None: assert resp.keys().__contains__("_version") -async def test_init(write_client) -> None: +async def test_init(write_client: Any) -> None: await Repository.init(index="test-git") assert await write_client.indices.exists(index="test-git") -async def test_get_raises_404_on_index_missing(data_client) -> None: +async def test_get_raises_404_on_index_missing(data_client: Any) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php", index="not-there") -async def test_get_raises_404_on_non_existent_id(data_client) -> None: +async def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php") -async def test_get_returns_none_if_404_ignored(data_client) -> None: +async def test_get_returns_none_if_404_ignored(data_client: Any) -> None: assert None is await Repository.get("opensearch-dsl-php", ignore=404) async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( - data_client, + data_client: Any, ) -> None: assert None is await Repository.get("42", index="not-there", ignore=404) -async def test_get(data_client) -> None: +async def test_get(data_client: Any) -> None: opensearch_repo = await Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -325,15 +328,15 @@ async def test_get(data_client) -> None: assert 
datetime(2014, 3, 3) == opensearch_repo.created_at -async def test_exists_return_true(data_client) -> None: +async def test_exists_return_true(data_client: Any) -> None: assert await Repository.exists("opensearch-py") -async def test_exists_false(data_client) -> None: +async def test_exists_false(data_client: Any) -> None: assert not await Repository.exists("opensearch-dsl-php") -async def test_get_with_tz_date(data_client) -> None: +async def test_get_with_tz_date(data_client: Any) -> None: first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -345,7 +348,7 @@ async def test_get_with_tz_date(data_client) -> None: ) -async def test_save_with_tz_date(data_client) -> None: +async def test_save_with_tz_date(data_client: Any) -> None: tzinfo = timezone("Europe/Prague") first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -372,7 +375,7 @@ async def test_save_with_tz_date(data_client) -> None: ] -async def test_mget(data_client) -> None: +async def test_mget(data_client: Any) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -380,25 +383,27 @@ async def test_mget(data_client) -> None: assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_mget_raises_exception_when_missing_param_is_invalid(data_client) -> None: +async def test_mget_raises_exception_when_missing_param_is_invalid( + data_client: Any, +) -> None: with raises(ValueError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") -async def test_mget_raises_404_when_missing_param_is_raise(data_client) -> None: +async def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: with raises(NotFoundError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") async def test_mget_ignores_missing_docs_when_missing_param_is_skip( - data_client, + data_client: Any, ) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_update_works_from_search_response(data_client) -> None: +async def test_update_works_from_search_response(data_client: Any) -> None: opensearch_repo = (await Repository.search().execute())[0] await opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -409,7 +414,7 @@ async def test_update_works_from_search_response(data_client) -> None: assert "opensearch" == new_version.owner.name -async def test_update(data_client) -> None: +async def test_update(data_client: Any) -> None: opensearch_repo = await Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -433,7 +438,7 @@ async def test_update(data_client) -> None: assert "primary_term" in new_version.meta -async def test_save_updates_existing_doc(data_client) -> None: +async def test_save_updates_existing_doc(data_client: Any) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -446,7 +451,9 @@ async def test_save_updates_existing_doc(data_client) -> None: assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no -async def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> None: +async def test_save_automatically_uses_seq_no_and_primary_term( + data_client: Any, +) -> None: opensearch_repo = await 
Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -454,7 +461,9 @@ async def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> N await opensearch_repo.save() -async def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> None: +async def test_delete_automatically_uses_seq_no_and_primary_term( + data_client: Any, +) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -462,13 +471,13 @@ async def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> await opensearch_repo.delete() -async def assert_doc_equals(expected, actual) -> None: +async def assert_doc_equals(expected: Any, actual: Any) -> None: async for f in aiter(expected): assert f in actual assert actual[f] == expected[f] -async def test_can_save_to_different_index(write_client): +async def test_can_save_to_different_index(write_client: Any) -> None: test_repo = Repository(description="testing", meta={"id": 42}) assert await test_repo.save(index="test-document") @@ -483,7 +492,9 @@ async def test_can_save_to_different_index(write_client): ) -async def test_save_without_skip_empty_will_include_empty_fields(write_client) -> None: +async def test_save_without_skip_empty_will_include_empty_fields( + write_client: Any, +) -> None: test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert await test_repo.save(index="test-document", skip_empty=False) @@ -498,7 +509,7 @@ async def test_save_without_skip_empty_will_include_empty_fields(write_client) - ) -async def test_delete(write_client) -> None: +async def test_delete(write_client: Any) -> None: await write_client.create( index="test-document", id="opensearch-py", @@ -519,11 +530,11 @@ async def test_delete(write_client) -> None: ) -async def test_search(data_client) -> None: +async def test_search(data_client: Any) -> None: assert await Repository.search().count() == 1 -async def test_search_returns_proper_doc_classes(data_client) -> None: +async def test_search_returns_proper_doc_classes(data_client: Any) -> None: result = await Repository.search().execute() opensearch_repo = result.hits[0] @@ -532,8 +543,10 @@ async def test_search_returns_proper_doc_classes(data_client) -> None: assert opensearch_repo.owner.name == "opensearch" -async def test_refresh_mapping(data_client) -> None: +async def test_refresh_mapping(data_client: Any) -> None: class Commit(AsyncDocument): + _index: Any + class Index: name = "git" @@ -546,7 +559,7 @@ class Index: assert isinstance(Commit._index._mapping["committed_date"], Date) -async def test_highlight_in_meta(data_client) -> None: +async def test_highlight_in_meta(data_client: Any) -> None: commit = ( await Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index bc7abbd8..b03fefe8 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -9,6 +9,7 @@ # GitHub history for details. 
from datetime import datetime +from typing import Any import pytest from _pytest.mark.structures import MarkDecorator @@ -54,8 +55,8 @@ class MetricSearch(AsyncFacetedSearch): } -@pytest.fixture(scope="function") -def commit_search_cls(opensearch_version): +@pytest.fixture(scope="function") # type: ignore +def commit_search_cls(opensearch_version: Any) -> Any: interval_kwargs = {"fixed_interval": "1d"} class CommitSearch(AsyncFacetedSearch): @@ -79,8 +80,8 @@ class CommitSearch(AsyncFacetedSearch): return CommitSearch -@pytest.fixture(scope="function") -def repo_search_cls(opensearch_version): +@pytest.fixture(scope="function") # type: ignore +def repo_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class RepoSearch(AsyncFacetedSearch): @@ -93,15 +94,15 @@ class RepoSearch(AsyncFacetedSearch): ), } - def search(self): + def search(self) -> Any: s = super(RepoSearch, self).search() return s.filter("term", commit_repo="repo") return RepoSearch -@pytest.fixture(scope="function") -def pr_search_cls(opensearch_version): +@pytest.fixture(scope="function") # type: ignore +def pr_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class PRSearch(AsyncFacetedSearch): @@ -119,7 +120,7 @@ class PRSearch(AsyncFacetedSearch): return PRSearch -async def test_facet_with_custom_metric(data_client) -> None: +async def test_facet_with_custom_metric(data_client: Any) -> None: ms = MetricSearch() r = await ms.execute() @@ -128,7 +129,7 @@ async def test_facet_with_custom_metric(data_client) -> None: assert dates[0] == 1399038439000 -async def test_nested_facet(pull_request, pr_search_cls) -> None: +async def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: prs = pr_search_cls() r = await prs.execute() @@ -136,7 +137,7 @@ async def test_nested_facet(pull_request, pr_search_cls) -> None: assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments -async def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None: +async def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = await prs.execute() @@ -148,7 +149,7 @@ async def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None: assert not r.hits -async def test_datehistogram_facet(data_client, repo_search_cls) -> None: +async def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = await rs.execute() @@ -156,7 +157,7 @@ async def test_datehistogram_facet(data_client, repo_search_cls) -> None: assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created -async def test_boolean_facet(data_client, repo_search_cls) -> None: +async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = await rs.execute() @@ -167,7 +168,7 @@ async def test_boolean_facet(data_client, repo_search_cls) -> None: async def test_empty_search_finds_everything( - data_client, opensearch_version, commit_search_cls + data_client: Any, opensearch_version: Any, commit_search_cls: Any ) -> None: cs = commit_search_cls() r = await cs.execute() @@ -213,7 +214,7 @@ async def test_empty_search_finds_everything( async def test_term_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls + data_client: Any, commit_search_cls: Any ) -> None: cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) @@ -259,7 +260,7 @@ async def 
test_term_filters_are_shown_as_selected_and_data_is_filtered( async def test_range_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls + data_client: Any, commit_search_cls: Any ) -> None: cs = commit_search_cls(filters={"deletions": "better"}) @@ -268,7 +269,7 @@ async def test_range_filters_are_shown_as_selected_and_data_is_filtered( assert 19 == r.hits.total.value -async def test_pagination(data_client, commit_search_cls) -> None: +async def test_pagination(data_client: Any, commit_search_cls: Any) -> None: cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index f11e6d3f..14b87e15 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + import pytest from _pytest.mark.structures import MarkDecorator @@ -24,7 +26,7 @@ class Post(AsyncDocument): published_from = Date() -async def test_index_template_works(write_client) -> None: +async def test_index_template_works(write_client: Any) -> None: it = AsyncIndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -45,7 +47,7 @@ async def test_index_template_works(write_client) -> None: } == await write_client.indices.get_mapping(index="test-blog") -async def test_index_can_be_saved_even_with_settings(write_client) -> None: +async def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: i = AsyncIndex("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) await i.save() @@ -60,12 +62,14 @@ async def test_index_can_be_saved_even_with_settings(write_client) -> None: ) -async def test_index_exists(data_client) -> None: +async def test_index_exists(data_client: Any) -> None: assert await AsyncIndex("git").exists() assert not await AsyncIndex("not-there").exists() -async def test_index_can_be_created_with_settings_and_mappings(write_client) -> None: +async def test_index_can_be_created_with_settings_and_mappings( + write_client: Any, +) -> None: i = AsyncIndex("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -90,7 +94,7 @@ async def test_index_can_be_created_with_settings_and_mappings(write_client) -> } -async def test_delete(write_client) -> None: +async def test_delete(write_client: Any) -> None: await write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -101,9 +105,9 @@ async def test_delete(write_client) -> None: assert not await write_client.indices.exists(index="test-index") -async def test_multiple_indices_with_same_doc_type_work(write_client) -> None: - i1 = AsyncIndex("test-index-1", using=write_client) - i2 = AsyncIndex("test-index-2", using=write_client) +async def test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: + i1: Any = AsyncIndex("test-index-1", using=write_client) + i2: Any = AsyncIndex("test-index-2", using=write_client) for i in i1, i2: i.document(Post) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index 6be391b3..35a4e8d8 100644 --- 
a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -8,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + import pytest from _pytest.mark.structures import MarkDecorator from pytest import raises @@ -19,7 +21,7 @@ pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_mapping_saved_into_opensearch(write_client) -> None: +async def test_mapping_saved_into_opensearch(write_client: Any) -> None: m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -40,7 +42,7 @@ async def test_mapping_saved_into_opensearch(write_client) -> None: async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( - write_client, + write_client: Any, ) -> None: m = mapping.AsyncMapping() m.field( @@ -65,7 +67,7 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( async def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( - write_client, + write_client: Any, ) -> None: m = mapping.AsyncMapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -95,7 +97,7 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_with_anal } == await write_client.indices.get_mapping(index="test-mapping") -async def test_mapping_gets_updated_from_opensearch(write_client): +async def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: await write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index 2b995c54..8431fa4a 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -10,6 +10,8 @@ from __future__ import unicode_literals +from typing import Any + import pytest from _pytest.mark.structures import MarkDecorator from pytest import raises @@ -29,7 +31,7 @@ class Repository(AsyncDocument): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Any = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -41,7 +43,7 @@ class Index: name = "flat-git" -async def test_filters_aggregation_buckets_are_accessible(data_client) -> None: +async def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -62,7 +64,7 @@ async def test_filters_aggregation_buckets_are_accessible(data_client) -> None: ) -async def test_top_hits_are_wrapped_in_response(data_client) -> None: +async def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -78,7 +80,7 @@ async def test_top_hits_are_wrapped_in_response(data_client) -> None: assert isinstance(hits[0], Commit) -async def test_inner_hits_are_wrapped_in_response(data_client) -> None: +async def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: s = AsyncSearch(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -89,7 +91,7 
@@ async def test_inner_hits_are_wrapped_in_response(data_client) -> None:
     assert repr(commit.meta.inner_hits.repo[0]).startswith("<Hit(git/opensearch-py): ")


-async def test_scan_respects_doc_types(data_client) -> None:
+async def test_scan_respects_doc_types(data_client: Any) -> None:
     result = Repository.search().scan()
     repos = await get_result(result)

@@ -98,7 +100,7 @@ async def test_scan_respects_doc_types(data_client) -> None:
     assert repos[0].organization == "opensearch"


-async def test_scan_iterates_through_all_docs(data_client) -> None:
+async def test_scan_iterates_through_all_docs(data_client: Any) -> None:
     s = AsyncSearch(index="flat-git")
     result = s.scan()
     commits = await get_result(result)
@@ -107,14 +109,14 @@ async def test_scan_iterates_through_all_docs(data_client) -> None:
     assert {d["_id"] for d in FLAT_DATA} == {c.meta.id for c in commits}


-async def get_result(b):
+async def get_result(b: Any) -> Any:
     a = []
     async for i in b:
         a.append(i)
     return a


-async def test_multi_search(data_client) -> None:
+async def test_multi_search(data_client: Any) -> None:
     s1 = Repository.search()
     s2 = AsyncSearch(index="flat-git")

@@ -131,7 +133,7 @@ async def test_multi_search(data_client) -> None:
     assert r2._search is s2


-async def test_multi_missing(data_client) -> None:
+async def test_multi_missing(data_client: Any) -> None:
     s1 = Repository.search()
     s2 = AsyncSearch(index="flat-git")
     s3 = AsyncSearch(index="does_not_exist")
@@ -154,7 +156,7 @@ async def test_multi_missing(data_client) -> None:
     assert r3 is None


-async def test_raw_subfield_can_be_used_in_aggs(data_client) -> None:
+async def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None:
     s = AsyncSearch(index="git")[0:0]
     s.aggs.bucket("authors", "terms", field="author.name.raw", size=1)
     r = await s.execute()
diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py
index 4dcf32b3..46e515df 100644
--- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py
+++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py
@@ -8,6 +8,8 @@
 # Modifications Copyright OpenSearch Contributors. See
 # GitHub history for details.
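
One change above is worth calling out: `Repository.search` went from `def search(cls)` to `def search(cls, using: Any = None, index: Any = None)`. mypy enforces that an override accepts at least the parameters of the base-class method (`AsyncDocument.search` takes `using` and `index`), so the narrower signature is reported as an incompatible override. A small sketch of that rule, independent of opensearch-py:

```python
from typing import Any, Optional


class Base:
    @classmethod
    def search(cls, using: Any = None, index: Optional[str] = None) -> Any:
        return {"using": using, "index": index}


class Child(Base):
    @classmethod
    def search(cls, using: Any = None, index: Optional[str] = None) -> Any:
        # The override must accept at least the parent's parameters;
        # dropping `using`/`index` would be an incompatible override
        # under mypy's Liskov-substitution check.
        return super().search(using=using, index=index)
```
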
+from typing import Any + import pytest from _pytest.mark.structures import MarkDecorator @@ -17,7 +19,9 @@ pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None: +async def test_update_by_query_no_script( + write_client: Any, setup_ubq_tests: Any +) -> None: index = setup_ubq_tests ubq = ( @@ -36,7 +40,9 @@ async def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None: assert response.success() -async def test_update_by_query_with_script(write_client, setup_ubq_tests) -> None: +async def test_update_by_query_with_script( + write_client: Any, setup_ubq_tests: Any +) -> None: index = setup_ubq_tests ubq = ( @@ -53,7 +59,9 @@ async def test_update_by_query_with_script(write_client, setup_ubq_tests) -> Non assert response.version_conflicts == 0 -async def test_delete_by_query_with_script(write_client, setup_ubq_tests) -> None: +async def test_delete_by_query_with_script( + write_client: Any, setup_ubq_tests: Any +) -> None: index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index 88b792db..5af06a24 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -28,7 +28,7 @@ class TestAlertingPlugin(AsyncOpenSearchTestCase): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_create_destination(self): + async def test_create_destination(self) -> None: # Test to create alert destination dummy_destination = { "name": "my-destination", @@ -59,7 +59,7 @@ async def test_get_destination(self) -> None: (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - async def test_create_monitor(self): + async def test_create_monitor(self) -> None: # Create a dummy destination await self.test_create_destination() diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index 0efcd25e..53aeb3ad 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -33,6 +33,7 @@ """ import inspect import warnings +from typing import Any import pytest from _pytest.mark.structures import MarkDecorator @@ -53,14 +54,14 @@ OPENSEARCH_VERSION = None -async def await_if_coro(x): +async def await_if_coro(x: Any) -> Any: if inspect.iscoroutine(x): return await x return x class AsyncYamlRunner(YamlRunner): - async def setup(self): + async def setup(self) -> None: # Pull skips from individual tests to not do unnecessary setup. 
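
Because the YAML runner may drive either a sync or an async client, the `await_if_coro` helper above normalizes both cases: it awaits only when it actually received a coroutine. Pulled out into a self-contained, runnable sketch:

```python
import asyncio
import inspect
from typing import Any


async def await_if_coro(x: Any) -> Any:
    # Transparently await coroutines; pass plain values through unchanged.
    if inspect.iscoroutine(x):
        return await x
    return x


async def main() -> None:
    async def async_value() -> int:
        return 42

    assert await await_if_coro(async_value()) == 42  # coroutine: awaited
    assert await await_if_coro(42) == 42  # plain value: returned as-is


asyncio.run(main())
```
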
skip_code = [] for action in self._run_code: @@ -78,12 +79,12 @@ async def setup(self): if self._setup_code: await self.run_code(self._setup_code) - async def teardown(self) -> None: + async def teardown(self) -> Any: if self._teardown_code: self.section("teardown") await self.run_code(self._teardown_code) - async def opensearch_version(self): + async def opensearch_version(self) -> Any: global OPENSEARCH_VERSION if OPENSEARCH_VERSION is None: version_string = (await self.client.info())["version"]["number"] @@ -93,10 +94,10 @@ async def opensearch_version(self): OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 999 for v in version) return OPENSEARCH_VERSION - def section(self, name) -> None: + def section(self, name: str) -> None: print(("=" * 10) + " " + name + " " + ("=" * 10)) - async def run(self) -> None: + async def run(self) -> Any: try: await self.setup() self.section("test") @@ -107,7 +108,7 @@ async def run(self) -> None: except Exception: pass - async def run_code(self, test) -> None: + async def run_code(self, test: Any) -> Any: """Execute an instruction based on its type.""" for action in test: assert len(action) == 1 @@ -119,7 +120,7 @@ async def run_code(self, test) -> None: else: raise RuntimeError("Invalid action type %r" % (action_type,)) - async def run_do(self, action) -> None: + async def run_do(self, action: Any) -> Any: api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) @@ -171,7 +172,7 @@ async def run_do(self, action) -> None: # Filter out warnings raised by other components. caught_warnings = [ - str(w.message) + str(w.message) # type: ignore for w in caught_warnings if w.category == OpenSearchWarning and str(w.message) not in allowed_warnings @@ -179,13 +180,13 @@ async def run_do(self, action) -> None: # Sorting removes the issue with order raised. We only care about # if all warnings are raised in the single API call. 
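
The sorted-comparison noted in the comment above is the design choice: the order in which warnings are emitted during a single API call is not guaranteed, so expected and caught warnings are compared as sorted lists rather than sequences. A standalone sketch of the same idea, using stand-in warning messages:

```python
import warnings
from typing import List


def emit() -> List[str]:
    # Capture warning messages without caring about emission order.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        warnings.warn("b is deprecated", UserWarning)
        warnings.warn("a is deprecated", UserWarning)
        return [str(w.message) for w in caught]


expected = ["a is deprecated", "b is deprecated"]
# Order-insensitive comparison: we only care that all expected
# warnings were raised, not in which order.
assert sorted(expected) == sorted(emit())
```
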
- if warn and sorted(warn) != sorted(caught_warnings): + if warn and sorted(warn) != sorted(caught_warnings): # type: ignore raise AssertionError( "Expected warnings not equal to actual warnings: expected=%r actual=%r" % (warn, caught_warnings) ) - async def run_skip(self, skip) -> None: + async def run_skip(self, skip: Any) -> Any: if "features" in skip: features = skip["features"] if not isinstance(features, (tuple, list)): @@ -205,19 +206,19 @@ async def run_skip(self, skip) -> None: if min_version <= (await self.opensearch_version()) <= max_version: pytest.skip(reason) - async def _feature_enabled(self, name) -> bool: + async def _feature_enabled(self, name: str) -> Any: return False -@pytest.fixture(scope="function") -def async_runner(async_client): +@pytest.fixture(scope="function") # type: ignore +def async_runner(async_client: Any) -> AsyncYamlRunner: return AsyncYamlRunner(async_client) if RUN_ASYNC_REST_API_TESTS: - @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) - async def test_rest_api_spec(test_spec, async_runner) -> None: + @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) # type: ignore + async def test_rest_api_spec(test_spec: Any, async_runner: Any) -> None: if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") async_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 9fe8d9d1..6751ed29 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -11,7 +11,7 @@ from __future__ import unicode_literals -from unittest import IsolatedAsyncioTestCase +from unittest import IsolatedAsyncioTestCase # type: ignore import pytest from _pytest.mark.structures import MarkDecorator @@ -23,7 +23,7 @@ pytestmark: MarkDecorator = pytest.mark.asyncio -class TestSecurityPlugin(IsolatedAsyncioTestCase): +class TestSecurityPlugin(IsolatedAsyncioTestCase): # type: ignore ROLE_NAME = "test-role" ROLE_CONTENT = { "cluster_permissions": ["cluster_monitor"], @@ -123,7 +123,7 @@ async def test_create_user_with_body_param_empty(self) -> None: else: assert False - async def test_create_user_with_role(self): + async def test_create_user_with_role(self) -> None: await self.test_create_role() # Test to create user diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 50d734bc..319340da 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -18,7 +18,7 @@ class TestAsyncSigner: - def mock_session(self): + def mock_session(self) -> Mock: access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -37,7 +37,7 @@ async def test_aws_signer_async_as_http_auth(self) -> None: from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth auth = AWSV4SignerAsyncAuth(self.mock_session(), region) - headers = auth("GET", "http://localhost", {}, {}) + headers = auth("GET", "http://localhost") assert "Authorization" in headers assert "X-Amz-Date" in headers assert "X-Amz-Security-Token" in headers @@ -48,7 +48,7 @@ async def test_aws_signer_async_when_region_is_null(self) -> None: from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth with pytest.raises(ValueError) as e: - AWSV4SignerAsyncAuth(session, None) + AWSV4SignerAsyncAuth(session, None) # type: ignore assert str(e.value) == 
"Region cannot be empty" with pytest.raises(ValueError) as e: @@ -71,7 +71,7 @@ async def test_aws_signer_async_when_service_is_specified(self) -> None: from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth auth = AWSV4SignerAsyncAuth(self.mock_session(), region, service) - headers = auth("GET", "http://localhost", {}, {}) + headers = auth("GET", "http://localhost") assert "Authorization" in headers assert "X-Amz-Date" in headers assert "X-Amz-Security-Token" in headers @@ -79,7 +79,7 @@ async def test_aws_signer_async_when_service_is_specified(self) -> None: class TestAsyncSignerWithFrozenCredentials(TestAsyncSigner): - def mock_session(self, disable_get_frozen: bool = True): + def mock_session(self, disable_get_frozen: bool = True) -> Mock: access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -99,7 +99,7 @@ async def test_aws_signer_async_frozen_credentials_as_http_auth(self) -> None: mock_session = self.mock_session() auth = AWSV4SignerAsyncAuth(mock_session, region) - headers = auth("GET", "http://localhost", {}, {}) + headers = auth("GET", "http://localhost") assert "Authorization" in headers assert "X-Amz-Date" in headers assert "X-Amz-Security-Token" in headers diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index 4dabee05..4ef80707 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -45,16 +45,16 @@ class DummyConnection(Connection): - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) self.delay = kwargs.pop("delay", 0) - self.calls = [] + self.calls: Any = [] self.closed = False super(DummyConnection, self).__init__(**kwargs) - async def perform_request(self, *args, **kwargs) -> Any: + async def perform_request(self, *args: Any, **kwargs: Any) -> Any: if self.closed: raise RuntimeError("This connection is closed") if self.delay: @@ -123,15 +123,15 @@ async def close(self) -> None: class TestTransport: async def test_single_connection_uses_dummy_connection_pool(self) -> None: - t = AsyncTransport([{}]) - await t._async_call() - assert isinstance(t.connection_pool, DummyConnectionPool) - t = AsyncTransport([{"host": "localhost"}]) - await t._async_call() - assert isinstance(t.connection_pool, DummyConnectionPool) + t1: Any = AsyncTransport([{}]) + await t1._async_call() + assert isinstance(t1.connection_pool, DummyConnectionPool) + t2: Any = AsyncTransport([{"host": "localhost"}]) + await t2._async_call() + assert isinstance(t2.connection_pool, DummyConnectionPool) async def test_request_timeout_extracted_from_params_and_passed(self) -> None: - t = AsyncTransport([{}], connection_class=DummyConnection) + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"request_timeout": 42}) assert 1 == len(t.get_connection().calls) @@ -143,7 +143,7 @@ async def test_request_timeout_extracted_from_params_and_passed(self) -> None: } == t.get_connection().calls[0][1] async def test_timeout_extracted_from_params_and_passed(self) -> None: - t = AsyncTransport([{}], connection_class=DummyConnection) + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"timeout": 84}) assert 1 == len(t.get_connection().calls) @@ -154,8 +154,10 
@@ async def test_timeout_extracted_from_params_and_passed(self) -> None: "headers": None, } == t.get_connection().calls[0][1] - async def test_opaque_id(self): - t = AsyncTransport([{}], opaque_id="app-1", connection_class=DummyConnection) + async def test_opaque_id(self) -> None: + t: Any = AsyncTransport( + [{}], opaque_id="app-1", connection_class=DummyConnection + ) await t.perform_request("GET", "/") assert 1 == len(t.get_connection().calls) @@ -176,8 +178,8 @@ async def test_opaque_id(self): "headers": {"x-opaque-id": "request-1"}, } == t.get_connection().calls[1][1] - async def test_request_with_custom_user_agent_header(self): - t = AsyncTransport([{}], connection_class=DummyConnection) + async def test_request_with_custom_user_agent_header(self) -> None: + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request( "GET", "/", headers={"user-agent": "my-custom-value/1.2.3"} @@ -190,7 +192,7 @@ async def test_request_with_custom_user_agent_header(self): } == t.get_connection().calls[0][1] async def test_send_get_body_as_source(self) -> None: - t = AsyncTransport( + t: Any = AsyncTransport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -199,7 +201,7 @@ async def test_send_get_body_as_source(self) -> None: assert ("GET", "/", {"source": "{}"}, None) == t.get_connection().calls[0][0] async def test_send_get_body_as_post(self) -> None: - t = AsyncTransport( + t: Any = AsyncTransport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -208,7 +210,7 @@ async def test_send_get_body_as_post(self) -> None: assert ("POST", "/", None, b"{}") == t.get_connection().calls[0][0] async def test_body_gets_encoded_into_bytes(self) -> None: - t = AsyncTransport([{}], connection_class=DummyConnection) + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好") assert 1 == len(t.get_connection().calls) @@ -220,7 +222,7 @@ async def test_body_gets_encoded_into_bytes(self) -> None: ) == t.get_connection().calls[0][0] async def test_body_bytes_get_passed_untouched(self) -> None: - t = AsyncTransport([{}], connection_class=DummyConnection) + t: Any = AsyncTransport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" await t.perform_request("GET", "/", body=body) @@ -228,7 +230,7 @@ async def test_body_bytes_get_passed_untouched(self) -> None: assert ("GET", "/", None, body) == t.get_connection().calls[0][0] async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: - t = AsyncTransport([{}], connection_class=DummyConnection) + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好\uda6a") assert 1 == len(t.get_connection().calls) @@ -240,36 +242,36 @@ async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: ) == t.get_connection().calls[0][0] async def test_kwargs_passed_on_to_connections(self) -> None: - t = AsyncTransport([{"host": "google.com"}], port=123) + t: Any = AsyncTransport([{"host": "google.com"}], port=123) await t._async_call() assert 1 == len(t.connection_pool.connections) assert "http://google.com:123" == t.connection_pool.connections[0].host async def test_kwargs_passed_on_to_connection_pool(self) -> None: dt = object() - t = AsyncTransport([{}, {}], dead_timeout=dt) + t: Any = AsyncTransport([{}, {}], dead_timeout=dt) await t._async_call() assert dt is t.connection_pool.dead_timeout async def test_custom_connection_class(self) -> None: class 
MyConnection(object): - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs - t = AsyncTransport([{}], connection_class=MyConnection) + t: Any = AsyncTransport([{}], connection_class=MyConnection) await t._async_call() assert 1 == len(t.connection_pool.connections) assert isinstance(t.connection_pool.connections[0], MyConnection) async def test_add_connection(self) -> None: - t = AsyncTransport([{}], randomize_hosts=False) + t: Any = AsyncTransport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) assert 2 == len(t.connection_pool.connections) assert "http://google.com:1234" == t.connection_pool.connections[1].host async def test_request_will_fail_after_X_retries(self) -> None: - t = AsyncTransport( + t: Any = AsyncTransport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, ) @@ -284,7 +286,7 @@ async def test_request_will_fail_after_X_retries(self) -> None: assert 4 == len(t.get_connection().calls) async def test_failed_connection_will_be_marked_as_dead(self) -> None: - t = AsyncTransport( + t: Any = AsyncTransport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, ) @@ -302,7 +304,7 @@ async def test_resurrected_connection_will_be_marked_as_live_on_success( self, ) -> None: for method in ("GET", "HEAD"): - t = AsyncTransport([{}, {}], connection_class=DummyConnection) + t: Any = AsyncTransport([{}, {}], connection_class=DummyConnection) await t._async_call() con1 = t.connection_pool.get_connection() con2 = t.connection_pool.get_connection() @@ -314,7 +316,9 @@ async def test_resurrected_connection_will_be_marked_as_live_on_success( assert 1 == len(t.connection_pool.dead_count) async def test_sniff_will_use_seed_connections(self) -> None: - t = AsyncTransport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) + t: Any = AsyncTransport( + [{"data": CLUSTER_NODES}], connection_class=DummyConnection + ) await t._async_call() t.set_connections([{"data": "invalid"}]) @@ -323,7 +327,7 @@ async def test_sniff_will_use_seed_connections(self) -> None: assert "http://1.1.1.1:123" == t.get_connection().host async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: - t = AsyncTransport( + t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, @@ -335,7 +339,7 @@ async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: assert "http://1.1.1.1:123" == t.get_connection().host async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: - t = AsyncTransport( + t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, @@ -349,7 +353,7 @@ async def test_sniff_on_start_ignores_sniff_timeout(self) -> None: ].calls[0] async def test_sniff_uses_sniff_timeout(self) -> None: - t = AsyncTransport( + t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_timeout=42, @@ -361,8 +365,8 @@ async def test_sniff_uses_sniff_timeout(self) -> None: 0 ].calls[0] - async def test_sniff_reuses_connection_instances_if_possible(self): - t = AsyncTransport( + async def test_sniff_reuses_connection_instances_if_possible(self) -> None: + t: Any = AsyncTransport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, randomize_hosts=False, @@ -375,8 +379,8 @@ async def test_sniff_reuses_connection_instances_if_possible(self): assert 1 == 
len(t.connection_pool.connections) assert connection is t.get_connection() - async def test_sniff_on_fail_triggers_sniffing_on_fail(self): - t = AsyncTransport( + async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: + t: Any = AsyncTransport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, @@ -398,9 +402,11 @@ async def test_sniff_on_fail_triggers_sniffing_on_fail(self): assert "http://1.1.1.1:123" == t.get_connection().host @patch("opensearchpy._async.transport.AsyncTransport.sniff_hosts") - async def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts): + async def test_sniff_on_fail_failing_does_not_prevent_retires( + self, sniff_hosts: Any + ) -> None: sniff_hosts.side_effect = [TransportError("sniff failed")] - t = AsyncTransport( + t: Any = AsyncTransport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, @@ -416,8 +422,8 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts) assert 1 == len(conn_err.calls) assert 1 == len(conn_data.calls) - async def test_sniff_after_n_seconds(self, event_loop) -> None: - t = AsyncTransport( + async def test_sniff_after_n_seconds(self, event_loop: Any) -> None: + t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniffer_timeout=5, @@ -440,7 +446,7 @@ async def test_sniff_after_n_seconds(self, event_loop) -> None: async def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. - t = AsyncTransport( + t: Any = AsyncTransport( [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, @@ -454,22 +460,24 @@ async def test_sniff_7x_publish_host(self) -> None: } async def test_transport_close_closes_all_pool_connections(self) -> None: - t = AsyncTransport([{}], connection_class=DummyConnection) - await t._async_call() + t1: Any = AsyncTransport([{}], connection_class=DummyConnection) + await t1._async_call() - assert not any([conn.closed for conn in t.connection_pool.connections]) - await t.close() - assert all([conn.closed for conn in t.connection_pool.connections]) + assert not any([conn.closed for conn in t1.connection_pool.connections]) + await t1.close() + assert all([conn.closed for conn in t1.connection_pool.connections]) - t = AsyncTransport([{}, {}], connection_class=DummyConnection) - await t._async_call() + t2: Any = AsyncTransport([{}, {}], connection_class=DummyConnection) + await t2._async_call() - assert not any([conn.closed for conn in t.connection_pool.connections]) - await t.close() - assert all([conn.closed for conn in t.connection_pool.connections]) + assert not any([conn.closed for conn in t2.connection_pool.connections]) + await t2.close() + assert all([conn.closed for conn in t2.connection_pool.connections]) - async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop) -> None: - t = AsyncTransport( + async def test_sniff_on_start_error_if_no_sniffed_hosts( + self, event_loop: Any + ) -> None: + t: Any = AsyncTransport( [ {"data": ""}, {"data": ""}, @@ -485,8 +493,10 @@ async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop) -> Non await t._async_call() assert str(e.value) == "TransportError(N/A, 'Unable to sniff hosts.')" - async def 
test_sniff_on_start_waits_for_sniff_to_complete(self, event_loop): - t = AsyncTransport( + async def test_sniff_on_start_waits_for_sniff_to_complete( + self, event_loop: Any + ) -> None: + t: Any = AsyncTransport( [ {"delay": 1, "data": ""}, {"delay": 1, "data": ""}, @@ -521,8 +531,10 @@ async def test_sniff_on_start_waits_for_sniff_to_complete(self, event_loop): # and then resolved immediately after. assert 1 <= duration < 2 - async def test_sniff_on_start_close_unlocks_async_calls(self, event_loop): - t = AsyncTransport( + async def test_sniff_on_start_close_unlocks_async_calls( + self, event_loop: Any + ) -> None: + t: Any = AsyncTransport( [ {"delay": 10, "data": CLUSTER_NODES}, ], @@ -559,7 +571,7 @@ async def test_init_connection_pool_with_many_hosts(self) -> None: """ amt_hosts = 4 hosts = [{"host": "localhost", "port": 9092}] * amt_hosts - t = AsyncTransport( + t: Any = AsyncTransport( hosts=hosts, ) await t._async_init() @@ -577,7 +589,7 @@ async def test_init_pool_with_connection_class_to_many_hosts(self) -> None: """ amt_hosts = 4 hosts = [{"host": "localhost", "port": 9092}] * amt_hosts - t = AsyncTransport( + t: Any = AsyncTransport( hosts=hosts, connection_class=AIOHttpConnection, ) diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index ad795bcf..e36d9bb6 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -27,21 +27,30 @@ from collections import defaultdict -from unittest import SkipTest # noqa: F401 -from unittest import TestCase +from typing import Any, Sequence +from unittest import SkipTest, TestCase from opensearchpy import OpenSearch class DummyTransport(object): - def __init__(self, hosts, responses=None, **kwargs) -> None: + def __init__( + self, hosts: Sequence[str], responses: Any = None, **kwargs: Any + ) -> None: self.hosts = hosts self.responses = responses - self.call_count = 0 - self.calls = defaultdict(list) + self.call_count: int = 0 + self.calls: Any = defaultdict(list) - def perform_request(self, method, url, params=None, headers=None, body=None): - resp = 200, {} + def perform_request( + self, + method: str, + url: str, + params: Any = None, + headers: Any = None, + body: Any = None, + ) -> Any: + resp: Any = (200, {}) if self.responses: resp = self.responses[self.call_count] self.call_count += 1 @@ -52,12 +61,12 @@ def perform_request(self, method, url, params=None, headers=None, body=None): class OpenSearchTestCase(TestCase): def setUp(self) -> None: super(OpenSearchTestCase, self).setUp() - self.client = OpenSearch(transport_class=DummyTransport) + self.client: Any = OpenSearch(transport_class=DummyTransport) # type: ignore - def assert_call_count_equals(self, count) -> None: + def assert_call_count_equals(self, count: int) -> None: self.assertEqual(count, self.client.transport.call_count) - def assert_url_called(self, method, url, count: int = 1): + def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: self.assertIn((method, url), self.client.transport.calls) calls = self.client.transport.calls[(method, url)] self.assertEqual(count, len(calls)) @@ -78,3 +87,6 @@ def test_each_call_is_recorded(self) -> None: self.assertEqual( [({}, None, "body")], self.assert_url_called("DELETE", "/42", 1) ) + + +__all__ = ["SkipTest", "TestCase"] diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py index d09731bf..ed65dca4 100644 --- 
a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -17,7 +17,8 @@ class TestPluginsClient(TestCase): def test_plugins_client(self) -> None: with self.assertWarns(Warning) as w: client = OpenSearch() - client.plugins.__init__(client) # double-init + # double-init + client.plugins.__init__(client) # type: ignore self.assertEqual( str(w.warnings[0].message), "Cannot load `alerting` directly to OpenSearch as it already exists. Use `OpenSearch.plugin.alerting` instead.", diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index b6a034eb..797624fc 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -28,17 +28,19 @@ from __future__ import unicode_literals +from typing import Any + from opensearchpy.client.utils import _bulk_body, _escape, _make_path, query_params from ..test_cases import TestCase class TestQueryParams(TestCase): - def setup_method(self, _) -> None: - self.calls = [] + def setup_method(self, _: Any) -> None: + self.calls: Any = [] @query_params("simple_param") - def func_to_wrap(self, *args, **kwargs) -> None: + def func_to_wrap(self, *args: Any, **kwargs: Any) -> None: self.calls.append((args, kwargs)) def test_handles_params(self) -> None: diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py index 6ba12d0d..45cc46fd 100644 --- a/test_opensearchpy/test_connection/test_base_connection.py +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -88,7 +88,7 @@ def test_raises_warnings_when_folded(self) -> None: self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) - def test_ipv6_host_and_port(self): + def test_ipv6_host_and_port(self) -> None: for kwargs, expected_host in [ ({"host": "::1"}, "http://[::1]:9200"), ({"host": "::1", "port": 443}, "http://[::1]:443"), @@ -96,7 +96,7 @@ def test_ipv6_host_and_port(self): ({"host": "127.0.0.1", "port": 1234}, "http://127.0.0.1:1234"), ({"host": "localhost", "use_ssl": True}, "https://localhost:9200"), ]: - conn = Connection(**kwargs) + conn = Connection(**kwargs) # type: ignore assert conn.host == expected_host def test_compatibility_accept_header(self) -> None: diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index 409981f0..7043ec54 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -30,6 +30,7 @@ import re import uuid import warnings +from typing import Any import pytest from mock import Mock, patch @@ -49,24 +50,27 @@ class TestRequestsHttpConnection(TestCase): def _get_mock_connection( - self, connection_params={}, status_code: int = 200, response_body: bytes = b"{}" - ): + self, + connection_params: Any = {}, + status_code: int = 200, + response_body: bytes = b"{}", + ) -> Any: con = RequestsHttpConnection(**connection_params) - def _dummy_send(*args, **kwargs): + def _dummy_send(*args: Any, **kwargs: Any) -> Any: dummy_response = Mock() dummy_response.headers = {} dummy_response.status_code = status_code dummy_response.content = response_body dummy_response.request = args[0] dummy_response.cookies = {} - _dummy_send.call_args = (args, kwargs) + _dummy_send.call_args = (args, kwargs) # type: ignore return dummy_response - 
con.session.send = _dummy_send + con.session.send = _dummy_send # type: ignore return con - def _get_request(self, connection, *args, **kwargs): + def _get_request(self, connection: Any, *args: Any, **kwargs: Any) -> Any: if "body" in kwargs: kwargs["body"] = kwargs["body"].encode("utf-8") @@ -237,14 +241,14 @@ def test_request_error_is_returned_on_400(self) -> None: self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") @patch("opensearchpy.connection.base.logger") - def test_head_with_404_doesnt_get_logged(self, logger) -> None: + def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: con = self._get_mock_connection(status_code=404) self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") self.assertEqual(0, logger.warning.call_count) @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") - def test_failed_request_logs_and_traces(self, logger, tracer) -> None: + def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: con = self._get_mock_connection( response_body=b'{"answer": 42}', status_code=500 ) @@ -272,7 +276,7 @@ def test_failed_request_logs_and_traces(self, logger, tracer) -> None: @patch("opensearchpy.connection.base.tracer") @patch("opensearchpy.connection.base.logger") - def test_success_logs_and_traces(self, logger, tracer) -> None: + def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") status, headers, data = con.perform_request( "GET", @@ -311,7 +315,7 @@ def test_success_logs_and_traces(self, logger, tracer) -> None: self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger) -> None: + def test_uncompressed_body_logged(self, logger: Any) -> None: con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -366,7 +370,7 @@ def test_http_auth_attached(self) -> None: self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") @patch("opensearchpy.connection.base.tracer") - def test_url_prefix(self, tracer) -> None: + def test_url_prefix(self, tracer: Any) -> None: con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) request = self._get_request( con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 @@ -392,16 +396,16 @@ def test_surrogatepass_into_bytes(self) -> None: def test_recursion_error_reraised(self) -> None: conn = RequestsHttpConnection() - def send_raise(*_, **__): + def send_raise(*_: Any, **__: Any) -> Any: raise RecursionError("Wasn't modified!") - conn.session.send = send_raise + conn.session.send = send_raise # type: ignore with pytest.raises(RecursionError) as e: conn.perform_request("GET", "/") assert str(e.value) == "Wasn't modified!" 
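# A minimal standalone sketch (separate from the diff itself) of the
# stubbed-send pattern these connection tests rely on: replacing
# session.send keeps perform_request() fully offline. This uses the
# stdlib mock; the suite itself imports the `mock` package.
from unittest.mock import Mock

from opensearchpy import RequestsHttpConnection

con = RequestsHttpConnection()

def _fake_send(request, **kwargs):
    # Fabricate the minimal requests-style response the connection reads.
    response = Mock()
    response.headers = {}
    response.status_code = 200
    response.content = b"{}"
    response.request = request
    response.cookies = {}
    return response

con.session.send = _fake_send  # bypass the real HTTP transport
status, headers, data = con.perform_request("GET", "/")
assert status == 200 and data == "{}"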
- def mock_session(self): + def mock_session(self) -> Any: access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -448,7 +452,7 @@ def test_aws_signer_when_service_is_specified(self) -> None: self.assertIn("X-Amz-Security-Token", prepared_request.headers) @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") - def test_aws_signer_signs_with_query_string(self, mock_sign) -> None: + def test_aws_signer_signs_with_query_string(self, mock_sign: Any) -> None: region = "us-west-1" service = "aoss" @@ -469,6 +473,9 @@ def test_aws_signer_signs_with_query_string(self, mock_sign) -> None: class TestRequestsConnectionRedirect: + server1: TestHTTPServer + server2: TestHTTPServer + @classmethod def setup_class(cls) -> None: # Start servers @@ -505,7 +512,7 @@ def test_redirect_success_when_allow_redirect_true(self) -> None: class TestSignerWithFrozenCredentials(TestRequestsHttpConnection): - def mock_session(self): + def mock_session(self) -> Any: access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index c87d8ac0..9720283b 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -32,6 +32,7 @@ from gzip import GzipFile from io import BytesIO from platform import python_version +from typing import Any import pytest import urllib3 @@ -45,15 +46,17 @@ class TestUrllib3HttpConnection(TestCase): - def _get_mock_connection(self, connection_params={}, response_body: bytes = b"{}"): + def _get_mock_connection( + self, connection_params: Any = {}, response_body: bytes = b"{}" + ) -> Any: con = Urllib3HttpConnection(**connection_params) - def _dummy_urlopen(*args, **kwargs): + def _dummy_urlopen(*args: Any, **kwargs: Any) -> Any: dummy_response = Mock() dummy_response.headers = HTTPHeaderDict({}) dummy_response.status = 200 dummy_response.data = response_body - _dummy_urlopen.call_args = (args, kwargs) + _dummy_urlopen.call_args = (args, kwargs) # type: ignore return dummy_response con.pool.urlopen = _dummy_urlopen @@ -181,7 +184,7 @@ def test_http_auth_list(self) -> None: "urllib3.HTTPConnectionPool.urlopen", return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), ) - def test_aws_signer_as_http_auth_adds_headers(self, mock_open) -> None: + def test_aws_signer_as_http_auth_adds_headers(self, mock_open: Any) -> None: from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") @@ -247,7 +250,7 @@ def test_aws_signer_when_service_is_specified(self) -> None: self.assertIn("X-Amz-Date", headers) self.assertIn("X-Amz-Security-Token", headers) - def mock_session(self): + def mock_session(self) -> Any: access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -290,6 +293,7 @@ def test_no_warning_when_using_ssl_context(self) -> None: self.assertEqual(0, len(w)) def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: + kwargs: Any for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, @@ -325,7 +329,7 @@ def test_uses_no_ca_certs(self) -> None: self.assertIsNone(c.pool.ca_certs) @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger) -> None: + def test_uncompressed_body_logged(self, logger: Any) -> None: con = 
self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -344,7 +348,7 @@ def test_surrogatepass_into_bytes(self) -> None: def test_recursion_error_reraised(self) -> None: conn = Urllib3HttpConnection() - def urlopen_raise(*_, **__): + def urlopen_raise(*_: Any, **__: Any) -> Any: raise RecursionError("Wasn't modified!") conn.pool.urlopen = urlopen_raise @@ -355,7 +359,7 @@ def urlopen_raise(*_, **__): class TestSignerWithFrozenCredentials(TestUrllib3HttpConnection): - def mock_session(self): + def mock_session(self) -> Any: access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index 5630030e..45afd93e 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -27,6 +27,7 @@ import time +from typing import Any from opensearchpy.connection import Connection from opensearchpy.connection_pool import ( @@ -57,7 +58,7 @@ def test_default_round_robin(self) -> None: connections.add(pool.get_connection()) self.assertEqual(connections, set(range(100))) - def test_disable_shuffling(self): + def test_disable_shuffling(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)], randomize_hosts=False) connections = [] @@ -65,9 +66,9 @@ def test_disable_shuffling(self): connections.append(pool.get_connection()) self.assertEqual(connections, list(range(100))) - def test_selectors_have_access_to_connection_opts(self): + def test_selectors_have_access_to_connection_opts(self) -> None: class MySelector(RoundRobinSelector): - def select(self, connections): + def select(self, connections: Any) -> Any: return self.connection_opts[ super(MySelector, self).select(connections) ]["actual"] diff --git a/test_opensearchpy/test_helpers/conftest.py b/test_opensearchpy/test_helpers/conftest.py index 9c93ccd0..09778000 100644 --- a/test_opensearchpy/test_helpers/conftest.py +++ b/test_opensearchpy/test_helpers/conftest.py @@ -26,24 +26,26 @@ # under the License. 
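# A minimal standalone sketch (separate from the diff itself) of the
# fixture pattern annotated below: register a Mock client under a named
# connection alias, then wipe the registry after the test (note the fix
# from the non-existent `_conn` attribute to the real `_conns`). The
# fixture name `fake_client` is illustrative only.
from typing import Any
from unittest.mock import Mock

from pytest import fixture

from opensearchpy.connection.connections import add_connection, connections

@fixture  # type: ignore
def fake_client() -> Any:
    client = Mock()
    add_connection("fake", client)  # make it resolvable by alias in the DSL
    yield client
    connections._conns = {}  # drop every registered connection
    connections._kwargs = {}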
+from typing import Any + from mock import Mock from pytest import fixture from opensearchpy.connection.connections import add_connection, connections -@fixture -def mock_client(dummy_response): +@fixture # type: ignore +def mock_client(dummy_response: Any) -> Any: client = Mock() client.search.return_value = dummy_response add_connection("mock", client) yield client - connections._conn = {} + connections._conns = {} connections._kwargs = {} -@fixture -def dummy_response(): +@fixture # type: ignore +def dummy_response() -> Any: return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -91,8 +93,8 @@ def dummy_response(): } -@fixture -def aggs_search(): +@fixture # type: ignore +def aggs_search() -> Any: from opensearchpy import Search s = Search(index="flat-git") @@ -106,8 +108,8 @@ def aggs_search(): return s -@fixture -def aggs_data(): +@fixture # type: ignore +def aggs_data() -> Any: return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index 35b78d9a..739e8647 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -28,6 +28,7 @@ import threading import time +from typing import Any import mock import pytest @@ -40,19 +41,19 @@ lock_side_effect = threading.Lock() -def mock_process_bulk_chunk(*args, **kwargs): +def mock_process_bulk_chunk(*args: Any, **kwargs: Any) -> Any: """ Threadsafe way of mocking process bulk chunk: https://stackoverflow.com/questions/39332139/thread-safe-version-of-mock-call-count """ with lock_side_effect: - mock_process_bulk_chunk.call_count += 1 + mock_process_bulk_chunk.call_count += 1 # type: ignore time.sleep(0.1) return [] -mock_process_bulk_chunk.call_count = 0 +mock_process_bulk_chunk.call_count = 0 # type: ignore class TestParallelBulk(TestCase): @@ -60,21 +61,21 @@ class TestParallelBulk(TestCase): "opensearchpy.helpers.actions._process_bulk_chunk", side_effect=mock_process_bulk_chunk, ) - def test_all_chunks_sent(self, _process_bulk_chunk) -> None: + def test_all_chunks_sent(self, _process_bulk_chunk: Any) -> None: actions = ({"x": i} for i in range(100)) list(helpers.parallel_bulk(OpenSearch(), actions, chunk_size=2)) - self.assertEqual(50, mock_process_bulk_chunk.call_count) + self.assertEqual(50, mock_process_bulk_chunk.call_count) # type: ignore - @pytest.mark.skip + @pytest.mark.skip # type: ignore @mock.patch( "opensearchpy.helpers.actions._process_bulk_chunk", # make sure we spend some time in the thread side_effect=lambda *a: [ - (True, time.sleep(0.001) or threading.current_thread().ident) + (True, time.sleep(0.001) or threading.current_thread().ident) # type: ignore ], ) - def test_chunk_sent_from_different_threads(self, _process_bulk_chunk) -> None: + def test_chunk_sent_from_different_threads(self, _process_bulk_chunk: Any) -> None: actions = ({"x": i} for i in range(100)) results = list( helpers.parallel_bulk(OpenSearch(), actions, thread_count=10, chunk_size=2) @@ -83,8 +84,8 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk) -> None: class TestChunkActions(TestCase): - def setup_method(self, _) -> None: - self.actions = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip + def setup_method(self, _: Any) -> None: + self.actions: Any = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip def test_expand_action(self) -> None: self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) @@ -92,7 
+93,7 @@ def test_expand_action(self) -> None: helpers.expand_action({"key": "val"}), ({"index": {}}, {"key": "val"}) ) - def test_expand_action_actions(self): + def test_expand_action_actions(self) -> None: self.assertEqual( helpers.expand_action( {"_op_type": "delete", "_id": "id", "_index": "index"} @@ -154,7 +155,7 @@ def test_expand_action_options(self) -> None: ({"index": {action_option: 0}}, {"key": "val"}), ) - def test__source_metadata_or_source(self): + def test__source_metadata_or_source(self) -> None: self.assertEqual( helpers.expand_action({"_source": {"key": "val"}}), ({"index": {}}, {"key": "val"}), diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index f46dd132..8a23c218 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -37,7 +37,7 @@ def test_repr() -> None: assert "Terms(aggs={'max_score': Max(field='score')}, field='tags')" == repr(a) -def test_meta(): +def test_meta() -> None: max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -66,7 +66,7 @@ def test_A_creates_proper_agg() -> None: assert a._params == {"field": "tags"} -def test_A_handles_nested_aggs_properly(): +def test_A_handles_nested_aggs_properly() -> None: max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -79,7 +79,7 @@ def test_A_passes_aggs_through() -> None: assert aggs.A(a) is a -def test_A_from_dict(): +def test_A_from_dict() -> None: d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -95,7 +95,7 @@ def test_A_from_dict(): assert a.aggs.per_author == aggs.A("terms", field="author.raw") -def test_A_fails_with_incorrect_dict(): +def test_A_fails_with_incorrect_dict() -> None: correct_d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -148,7 +148,7 @@ def test_buckets_equals_counts_subaggs() -> None: assert a != b -def test_buckets_to_dict(): +def test_buckets_to_dict() -> None: a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") @@ -189,7 +189,7 @@ def test_filter_can_be_instantiated_using_positional_args() -> None: assert a == aggs.A("filter", query.Q("term", f=42)) -def test_filter_aggregation_as_nested_agg(): +def test_filter_aggregation_as_nested_agg() -> None: a = aggs.Terms(field="tags") a.bucket("filtered", "filter", query.Q("term", f=42)) @@ -199,7 +199,7 @@ def test_filter_aggregation_as_nested_agg(): } == a.to_dict() -def test_filter_aggregation_with_nested_aggs(): +def test_filter_aggregation_with_nested_aggs() -> None: a = aggs.Filter(query.Q("term", f=42)) a.bucket("testing", "terms", field="tags") @@ -229,7 +229,7 @@ def test_filters_correctly_identifies_the_hash() -> None: assert a.filters.group_a == query.Q("term", group="a") -def test_bucket_sort_agg(): +def test_bucket_sort_agg() -> None: bucket_sort_agg = aggs.BucketSort(sort=[{"total_sales": {"order": "desc"}}], size=3) assert bucket_sort_agg.to_dict() == { "bucket_sort": {"sort": [{"total_sales": {"order": "desc"}}], "size": 3} @@ -254,7 +254,7 @@ def test_bucket_sort_agg(): } == a.to_dict() -def test_bucket_sort_agg_only_trnunc(): +def test_bucket_sort_agg_only_trnunc() -> None: bucket_sort_agg = aggs.BucketSort(**{"from": 1, "size": 1}) assert bucket_sort_agg.to_dict() == {"bucket_sort": {"from": 1, "size": 1}} @@ -284,7 +284,7 @@ def test_boxplot_aggregation() -> None: 
assert {"boxplot": {"field": "load_time"}} == a.to_dict() -def test_rare_terms_aggregation(): +def test_rare_terms_aggregation() -> None: a = aggs.RareTerms(field="the-field") a.bucket("total_sales", "sum", field="price") a.bucket( @@ -316,7 +316,7 @@ def test_median_absolute_deviation_aggregation() -> None: assert {"median_absolute_deviation": {"field": "rating"}} == a.to_dict() -def test_t_test_aggregation(): +def test_t_test_aggregation() -> None: a = aggs.TTest( a={"field": "startup_time_before"}, b={"field": "startup_time_after"}, @@ -332,14 +332,14 @@ def test_t_test_aggregation(): } == a.to_dict() -def test_inference_aggregation(): +def test_inference_aggregation() -> None: a = aggs.Inference(model_id="model-id", buckets_path={"agg_name": "agg_name"}) assert { "inference": {"buckets_path": {"agg_name": "agg_name"}, "model_id": "model-id"} } == a.to_dict() -def test_moving_percentiles_aggregation(): +def test_moving_percentiles_aggregation() -> None: a = aggs.DateHistogram() a.bucket("the_percentile", "percentiles", field="price", percents=[1.0, 99.0]) a.pipeline( diff --git a/test_opensearchpy/test_helpers/test_analysis.py b/test_opensearchpy/test_helpers/test_analysis.py index 7b8f6b04..0226ee48 100644 --- a/test_opensearchpy/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_helpers/test_analysis.py @@ -36,7 +36,7 @@ def test_analyzer_serializes_as_name() -> None: assert "my_analyzer" == a.to_dict() -def test_analyzer_has_definition(): +def test_analyzer_has_definition() -> None: a = analysis.CustomAnalyzer( "my_analyzer", tokenizer="keyword", filter=["lowercase"] ) @@ -48,7 +48,7 @@ def test_analyzer_has_definition(): } == a.get_definition() -def test_simple_multiplexer_filter(): +def test_simple_multiplexer_filter() -> None: a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -76,7 +76,7 @@ def test_simple_multiplexer_filter(): } == a.get_analysis_definition() -def test_multiplexer_with_custom_filter(): +def test_multiplexer_with_custom_filter() -> None: a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -107,7 +107,7 @@ def test_multiplexer_with_custom_filter(): } == a.get_analysis_definition() -def test_conditional_token_filter(): +def test_conditional_token_filter() -> None: a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -172,7 +172,7 @@ def test_normalizer_serializes_as_name() -> None: assert "my_normalizer" == n.to_dict() -def test_normalizer_has_definition(): +def test_normalizer_has_definition() -> None: n = analysis.CustomNormalizer( "my_normalizer", filter=["lowercase", "asciifolding"], char_filter=["quote"] ) @@ -191,7 +191,7 @@ def test_tokenizer() -> None: assert {"type": "nGram", "min_gram": 3, "max_gram": 3} == t.get_definition() -def test_custom_analyzer_can_collect_custom_items(): +def test_custom_analyzer_can_collect_custom_items() -> None: trigram = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) my_stop = analysis.token_filter("my_stop", "stop", stopwords=["a", "b"]) umlauts = analysis.char_filter("umlauts", "pattern_replace", mappings=["ü=>ue"]) diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index ed78b4c0..e1b5e5c4 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -32,6 +32,7 @@ import pickle from datetime import datetime from hashlib import sha256 +from typing import Any from pytest import raises @@ -52,7 +53,7 @@ class MyDoc(document.Document): class 
MySubDoc(MyDoc): - name = field.Keyword() + name: Any = field.Keyword() class Index: name = "default-index" @@ -92,10 +93,10 @@ class Secret(str): class SecretField(field.CustomField): builtin_type = "text" - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: return codecs.encode(data, "rot_13") - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, Secret): return data return Secret(codecs.decode(data, "rot_13")) @@ -114,6 +115,8 @@ class NestedSecret(document.Document): class Index: name = "test-nested-secret" + _index: Any + class OptionalObjectWithRequiredField(document.Document): comments = field.Nested(properties={"title": field.Keyword(required=True)}) @@ -121,6 +124,8 @@ class OptionalObjectWithRequiredField(document.Document): class Index: name = "test-required" + _index: Any + class Host(document.Document): ip = field.Ip() @@ -128,12 +133,14 @@ class Host(document.Document): class Index: name = "test-host" + _index: Any + def test_range_serializes_properly() -> None: class D(document.Document): lr = field.LongRange() - d = D(lr=Range(lt=42)) + d: Any = D(lr=Range(lt=42)) assert 40 in d.lr assert 47 not in d.lr assert {"lr": {"lt": 42}} == d.to_dict() @@ -146,7 +153,7 @@ def test_range_deserializes_properly() -> None: class D(document.InnerDoc): lr = field.LongRange() - d = D.from_opensearch({"lr": {"lt": 42}}, True) + d: Any = D.from_opensearch({"lr": {"lt": 42}}, True) assert isinstance(d.lr, Range) assert 40 in d.lr assert 47 not in d.lr @@ -165,7 +172,7 @@ class A(document.Document): class B(document.Document): name = field.Keyword() - i = Index("i") + i: Any = Index("i") i.document(A) i.document(B) @@ -174,7 +181,7 @@ class B(document.Document): def test_ip_address_serializes_properly() -> None: - host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) + host: Any = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() @@ -202,7 +209,7 @@ class Index: def test_assigning_attrlist_to_field() -> None: - sc = SimpleCommit() + sc: Any = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -210,20 +217,20 @@ def test_assigning_attrlist_to_field() -> None: def test_optional_inner_objects_are_not_validated_if_missing() -> None: - d = OptionalObjectWithRequiredField() + d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None def test_custom_field() -> None: - s = SecretDoc(title=Secret("Hello")) + s1: Any = SecretDoc(title=Secret("Hello")) - assert {"title": "Uryyb"} == s.to_dict() - assert s.title == "Hello" + assert {"title": "Uryyb"} == s1.to_dict() + assert s1.title == "Hello" - s = SecretDoc.from_opensearch({"_source": {"title": "Uryyb"}}) - assert s.title == "Hello" - assert isinstance(s.title, Secret) + s2: Any = SecretDoc.from_opensearch({"_source": {"title": "Uryyb"}}) + assert s2.title == "Hello" + assert isinstance(s2.title, Secret) def test_custom_field_mapping() -> None: @@ -233,7 +240,7 @@ def test_custom_field_mapping() -> None: def test_custom_field_in_nested() -> None: - s = NestedSecret() + s: Any = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) assert {"secrets": [{"title": "Uryyb"}]} == s.to_dict() @@ -241,7 +248,7 @@ def test_custom_field_in_nested() -> None: def test_multi_works_after_doc_has_been_saved() -> None: - c = SimpleCommit() + c: Any = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -250,7 +257,7 @@ def test_multi_works_after_doc_has_been_saved() -> None: def 
test_multi_works_in_nested_after_doc_has_been_serialized() -> None: # Issue #359 - c = DocWithNested(comments=[Comment(title="First!")]) + c: Any = DocWithNested(comments=[Comment(title="First!")]) assert [] == c.comments[0].tags assert {"comments": [{"title": "First!"}]} == c.to_dict() @@ -258,17 +265,19 @@ def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: def test_null_value_for_object() -> None: - d = MyDoc(inner=None) + d: Any = MyDoc(inner=None) assert d.inner is None -def test_inherited_doc_types_can_override_index(): +def test_inherited_doc_types_can_override_index() -> None: class MyDocDifferentIndex(MySubDoc): + _index: Any + class Index: name = "not-default-index" settings = {"number_of_replicas": 0} - aliases = {"a": {}} + aliases: Any = {"a": {}} analyzers = [analyzer("my_analizer", tokenizer="keyword")] assert MyDocDifferentIndex._index._name == "not-default-index" @@ -295,8 +304,8 @@ class Index: } -def test_to_dict_with_meta(): - d = MySubDoc(title="hello") +def test_to_dict_with_meta() -> None: + d: Any = MySubDoc(title="hello") d.meta.routing = "some-parent" assert { @@ -306,29 +315,29 @@ def test_to_dict_with_meta(): } == d.to_dict(True) -def test_to_dict_with_meta_includes_custom_index(): - d = MySubDoc(title="hello") +def test_to_dict_with_meta_includes_custom_index() -> None: + d: Any = MySubDoc(title="hello") d.meta.index = "other-index" assert {"_index": "other-index", "_source": {"title": "hello"}} == d.to_dict(True) def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: - d = MySubDoc(tags=[], title=None, inner={}) + d: Any = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() assert {"tags": [], "title": None, "inner": {}} == d.to_dict(skip_empty=False) def test_attribute_can_be_removed() -> None: - d = MyDoc(title="hello") + d: Any = MyDoc(title="hello") del d.title assert "title" not in d._d_ def test_doc_type_can_be_correctly_pickled() -> None: - d = DocWithNested( + d: Any = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) s = pickle.dumps(d) @@ -343,14 +352,14 @@ def test_doc_type_can_be_correctly_pickled() -> None: def test_meta_is_accessible_even_on_empty_doc() -> None: - d = MyDoc() - d.meta + d1: Any = MyDoc() + d1.meta - d = MyDoc(title="aaa") - d.meta + d2: Any = MyDoc(title="aaa") + d2.meta -def test_meta_field_mapping(): +def test_meta_field_mapping() -> None: class User(document.Document): username = field.Text() @@ -373,7 +382,7 @@ def test_multi_value_fields() -> None: class Blog(document.Document): tags = field.Keyword(multi=True) - b = Blog() + b: Any = Blog() assert [] == b.tags b.tags.append("search") b.tags.append("python") @@ -382,20 +391,20 @@ class Blog(document.Document): def test_docs_with_properties() -> None: class User(document.Document): - pwd_hash = field.Text() + pwd_hash: Any = field.Text() - def check_password(self, pwd): + def check_password(self, pwd: Any) -> Any: return sha256(pwd).hexdigest() == self.pwd_hash @property - def password(self): + def password(self) -> Any: raise AttributeError("readonly") @password.setter - def password(self, pwd): + def password(self, pwd: Any) -> None: self.pwd_hash = sha256(pwd).hexdigest() - u = User(pwd_hash=sha256(b"secret").hexdigest()) + u: Any = User(pwd_hash=sha256(b"secret").hexdigest()) assert u.check_password(b"secret") assert not u.check_password(b"not-secret") @@ -409,8 +418,8 @@ def password(self, pwd): def test_nested_can_be_assigned_to() -> None: - d1 = 
DocWithNested(comments=[Comment(title="First!")]) - d2 = DocWithNested() + d1: Any = DocWithNested(comments=[Comment(title="First!")]) + d2: Any = DocWithNested() d2.comments = d1.comments assert isinstance(d1.comments[0], Comment) @@ -420,13 +429,13 @@ def test_nested_can_be_assigned_to() -> None: def test_nested_can_be_none() -> None: - d = DocWithNested(comments=None, title="Hello World!") + d: Any = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() def test_nested_defaults_to_list_and_can_be_updated() -> None: - md = DocWithNested() + md: Any = DocWithNested() assert [] == md.comments @@ -434,8 +443,8 @@ def test_nested_defaults_to_list_and_can_be_updated() -> None: assert {"comments": [{"title": "hello World!"}]} == md.to_dict() -def test_to_dict_is_recursive_and_can_cope_with_multi_values(): - md = MyDoc(name=["a", "b", "c"]) +def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: + md: Any = MyDoc(name=["a", "b", "c"]) md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")] assert isinstance(md.inner[0], MyInner) @@ -447,12 +456,12 @@ def test_to_dict_is_recursive_and_can_cope_with_multi_values(): def test_to_dict_ignores_empty_collections() -> None: - md = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) + md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() -def test_declarative_mapping_definition(): +def test_declarative_mapping_definition() -> None: assert issubclass(MyDoc, document.Document) assert hasattr(MyDoc, "_doc_type") assert { @@ -465,7 +474,7 @@ def test_declarative_mapping_definition(): } == MyDoc._doc_type.mapping.to_dict() -def test_you_can_supply_own_mapping_instance(): +def test_you_can_supply_own_mapping_instance() -> None: class MyD(document.Document): title = field.Text() @@ -479,9 +488,9 @@ class Meta: } == MyD._doc_type.mapping.to_dict() -def test_document_can_be_created_dynamically(): +def test_document_can_be_created_dynamically() -> None: n = datetime.now() - md = MyDoc(title="hello") + md: Any = MyDoc(title="hello") md.name = "My Fancy Document!" 
md.created_at = n @@ -501,13 +510,13 @@ def test_document_can_be_created_dynamically(): def test_invalid_date_will_raise_exception() -> None: - md = MyDoc() + md: Any = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): md.full_clean() -def test_document_inheritance(): +def test_document_inheritance() -> None: assert issubclass(MySubDoc, MyDoc) assert issubclass(MySubDoc, document.Document) assert hasattr(MySubDoc, "_doc_type") @@ -521,7 +530,7 @@ def test_document_inheritance(): } == MySubDoc._doc_type.mapping.to_dict() -def test_child_class_can_override_parent(): +def test_child_class_can_override_parent() -> None: class A(document.Document): o = field.Object(dynamic=False, properties={"a": field.Text()}) @@ -540,7 +549,7 @@ class B(A): def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: - md = MySubDoc(meta={"id": 42}, name="My First doc!") + md: Any = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" assert md.meta.index == "my-index" @@ -549,7 +558,7 @@ def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: assert {"id": 42, "index": "my-index"} == md.meta.to_dict() -def test_index_inheritance(): +def test_index_inheritance() -> None: assert issubclass(MyMultiSubDoc, MySubDoc) assert issubclass(MyMultiSubDoc, MyDoc2) assert issubclass(MyMultiSubDoc, document.Document) @@ -568,31 +577,31 @@ def test_index_inheritance(): def test_meta_fields_can_be_set_directly_in_init() -> None: p = object() - md = MyDoc(_id=p, title="Hello World!") + md: Any = MyDoc(_id=p, title="Hello World!") assert md.meta.id is p -def test_save_no_index(mock_client) -> None: - md = MyDoc() +def test_save_no_index(mock_client: Any) -> None: + md: Any = MyDoc() with raises(ValidationException): md.save(using="mock") -def test_delete_no_index(mock_client) -> None: - md = MyDoc() +def test_delete_no_index(mock_client: Any) -> None: + md: Any = MyDoc() with raises(ValidationException): md.delete(using="mock") def test_update_no_fields() -> None: - md = MyDoc() + md: Any = MyDoc() with raises(IllegalOperation): md.update() -def test_search_with_custom_alias_and_index(mock_client) -> None: - search_object = MyDoc.search( +def test_search_with_custom_alias_and_index(mock_client: Any) -> None: + search_object: Any = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) @@ -600,7 +609,7 @@ def test_search_with_custom_alias_and_index(mock_client) -> None: assert search_object._index == ["custom_index1", "custom_index2"] -def test_from_opensearch_respects_underscored_non_meta_fields(): +def test_from_opensearch_respects_underscored_non_meta_fields() -> None: doc = { "_index": "test-index", "_id": "opensearch", @@ -617,18 +626,18 @@ class Company(document.Document): class Index: name = "test-company" - c = Company.from_opensearch(doc) + c: Any = Company.from_opensearch(doc) assert c.meta.fields._tags == ["search"] assert c.meta.fields._routing == "opensearch" assert c._tagline == "You know, for search" -def test_nested_and_object_inner_doc(): +def test_nested_and_object_inner_doc() -> None: class MySubDocWithNested(MyDoc): nested_inner = field.Nested(MyInner) - props = MySubDocWithNested._doc_type.mapping.to_dict()["properties"] + props: Any = MySubDocWithNested._doc_type.mapping.to_dict()["properties"] assert props == { "created_at": {"type": "date"}, "inner": {"properties": {"old_field": {"type": "text"}}, "type": "object"}, diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py 
b/test_opensearchpy/test_helpers/test_faceted_search.py index e663bca1..528cd485 100644 --- a/test_opensearchpy/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_helpers/test_faceted_search.py @@ -26,6 +26,7 @@ # under the License. from datetime import datetime +from typing import Any import pytest @@ -72,7 +73,7 @@ def test_query_is_created_properly() -> None: } == s.to_dict() -def test_query_is_created_properly_with_sort_tuple(): +def test_query_is_created_properly_with_sort_tuple() -> None: bs = BlogSearch("python search", sort=("category", "-title")) s = bs.build_search() @@ -96,7 +97,7 @@ def test_query_is_created_properly_with_sort_tuple(): } == s.to_dict() -def test_filter_is_applied_to_search_but_not_relevant_facet(): +def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: bs = BlogSearch("python search", filters={"category": "opensearch"}) s = bs.build_search() @@ -119,7 +120,7 @@ def test_filter_is_applied_to_search_but_not_relevant_facet(): } == s.to_dict() -def test_filters_are_applied_to_search_ant_relevant_facets(): +def test_filters_are_applied_to_search_ant_relevant_facets() -> None: bs = BlogSearch( "python search", filters={"category": "opensearch", "tags": ["python", "django"]}, @@ -159,7 +160,7 @@ def test_date_histogram_facet_with_1970_01_01_date() -> None: assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore ["interval_type", "interval"], [ ("interval", "year"), @@ -186,7 +187,7 @@ def test_date_histogram_facet_with_1970_01_01_date() -> None: ("fixed_interval", "1h"), ], ) -def test_date_histogram_interval_types(interval_type, interval) -> None: +def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None: dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index 19582730..ce818b50 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -28,6 +28,7 @@ import base64 from datetime import datetime from ipaddress import ip_address +from typing import Any import pytest from dateutil import tz @@ -59,7 +60,7 @@ def test_boolean_deserialization() -> None: def test_date_field_can_have_default_tz() -> None: - f = field.Date(default_timezone="UTC") + f: Any = field.Date(default_timezone="UTC") now = datetime.now() now_with_tz = f._deserialize(now) @@ -76,7 +77,7 @@ def test_date_field_can_have_default_tz() -> None: def test_custom_field_car_wrap_other_field() -> None: class MyField(field.CustomField): @property - def builtin_type(self): + def builtin_type(self) -> Any: return field.Text(**self._params) assert {"type": "text", "index": "not_analyzed"} == MyField( @@ -91,7 +92,7 @@ def test_field_from_dict() -> None: assert {"type": "text", "index": "not_analyzed"} == f.to_dict() -def test_multi_fields_are_accepted_and_parsed(): +def test_multi_fields_are_accepted_and_parsed() -> None: f = field.construct_field( "text", fields={"raw": {"type": "keyword"}, "eng": field.Text(analyzer="english")}, @@ -123,7 +124,7 @@ def test_field_supports_multiple_analyzers() -> None: } == f.to_dict() -def test_multifield_supports_multiple_analyzers(): +def test_multifield_supports_multiple_analyzers() -> None: f = field.Text( fields={ "f1": field.Text(search_analyzer="keyword", analyzer="snowball"), @@ -145,8 +146,8 @@ def 
test_multifield_supports_multiple_analyzers(): def test_scaled_float() -> None: with pytest.raises(TypeError): - field.ScaledFloat() - f = field.ScaledFloat(123) + field.ScaledFloat() # type: ignore + f: Any = field.ScaledFloat(scaling_factor=123) assert f.to_dict() == {"scaling_factor": 123, "type": "scaled_float"} @@ -204,7 +205,7 @@ def test_object_disabled() -> None: assert f.to_dict() == {"type": "object", "enabled": False} -def test_object_constructor(): +def test_object_constructor() -> None: expected = {"type": "object", "properties": {"inner_int": {"type": "integer"}}} class Inner(InnerDoc): diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index bb8aa578..59c3e28e 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -27,6 +27,7 @@ import string from random import choice +from typing import Any from pytest import raises @@ -65,7 +66,7 @@ def test_search_is_limited_to_index_name() -> None: def test_cloned_index_has_copied_settings_and_using() -> None: client = object() - i = Index("my-index", using=client) + i: Any = Index("my-index", using=client) i.settings(number_of_shards=1) i2 = i.clone("my-other-index") @@ -82,7 +83,7 @@ def test_cloned_index_has_analysis_attribute() -> None: over the `_analysis` attribute. """ client = object() - i = Index("my-index", using=client) + i: Any = Index("my-index", using=client) random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( @@ -97,7 +98,7 @@ def test_cloned_index_has_analysis_attribute() -> None: def test_settings_are_saved() -> None: - i = Index("i") + i: Any = Index("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -105,7 +106,7 @@ def test_settings_are_saved() -> None: def test_registered_doc_type_included_in_to_dict() -> None: - i = Index("i", using="alias") + i: Any = Index("i", using="alias") i.document(Post) assert { @@ -119,7 +120,7 @@ def test_registered_doc_type_included_in_to_dict() -> None: def test_registered_doc_type_included_in_search() -> None: - i = Index("i", using="alias") + i: Any = Index("i", using="alias") i.document(Post) s = i.search() @@ -129,9 +130,9 @@ def test_registered_doc_type_included_in_search() -> None: def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.aliases(**alias_dict) assert index._aliases == alias_dict @@ -139,21 +140,21 @@ def test_aliases_add_to_object() -> None: def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.aliases(**alias_dict) assert index._aliases == index.to_dict()["aliases"] == alias_dict -def test_analyzers_added_to_object(): +def test_analyzers_added_to_object() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.analyzer(random_analyzer) assert index._analysis["analyzer"][random_analyzer_name] == { @@ -163,12 +164,12 @@ def 
test_analyzers_added_to_object(): } -def test_analyzers_returned_from_to_dict(): +def test_analyzers_returned_from_to_dict() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.analyzer(random_analyzer) assert index.to_dict()["settings"]["analysis"]["analyzer"][ @@ -177,21 +178,21 @@ def test_analyzers_returned_from_to_dict(): def test_conflicting_analyzer_raises_error() -> None: - i = Index("i") + i: Any = Index("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) with raises(ValueError): i.analyzer("my_analyzer", tokenizer="keyword", filter=["lowercase", "stop"]) -def test_index_template_can_have_order(): - i = Index("i-*") +def test_index_template_can_have_order() -> None: + i: Any = Index("i-*") it = i.as_template("i", order=2) assert {"index_patterns": ["i-*"], "order": 2} == it.to_dict() -def test_index_template_save_result(mock_client) -> None: - it = IndexTemplate("test-template", "test-*") +def test_index_template_save_result(mock_client: Any) -> None: + it: Any = IndexTemplate("test-template", "test-*") assert it.save(using="mock") == mock_client.indices.put_template() diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py index 5e4e49ce..2006b66f 100644 --- a/test_opensearchpy/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_helpers/test_mapping.py @@ -40,7 +40,7 @@ def test_mapping_can_has_fields() -> None: } == m.to_dict() -def test_mapping_update_is_recursive(): +def test_mapping_update_is_recursive() -> None: m1 = mapping.Mapping() m1.field("title", "text") m1.field("author", "object") @@ -83,7 +83,7 @@ def test_properties_can_iterate_over_all_the_fields() -> None: } -def test_mapping_can_collect_all_analyzers_and_normalizers(): +def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -156,7 +156,7 @@ def test_mapping_can_collect_all_analyzers_and_normalizers(): assert json.loads(json.dumps(m.to_dict())) == m.to_dict() -def test_mapping_can_collect_multiple_analyzers(): +def test_mapping_can_collect_multiple_analyzers() -> None: a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 142b865c..27790748 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -25,6 +25,8 @@ # specific language governing permissions and limitations # under the License. 
+from typing import Any + from pytest import raises from opensearchpy.helpers import function, query @@ -122,8 +124,8 @@ def test_other_and_bool_appends_other_to_must() -> None: def test_bool_and_other_appends_other_to_must() -> None: - q1 = query.Match(f="value1") - qb = query.Bool() + q1: Any = query.Match(f="value1") + qb: Any = query.Bool() q = qb & q1 assert q is not qb @@ -463,7 +465,7 @@ def test_function_score_with_functions() -> None: } == q.to_dict() -def test_function_score_with_no_function_is_boost_factor(): +def test_function_score_with_no_function_is_boost_factor() -> None: q = query.Q( "function_score", functions=[query.SF({"weight": 20, "filter": query.Q("term", f=42)})], @@ -474,7 +476,7 @@ def test_function_score_with_no_function_is_boost_factor(): } == q.to_dict() -def test_function_score_to_dict(): +def test_function_score_to_dict() -> None: q = query.Q( "function_score", query=query.Q("match", title="python"), @@ -503,7 +505,7 @@ def test_function_score_to_dict(): assert d == q.to_dict() -def test_function_score_with_single_function(): +def test_function_score_with_single_function() -> None: d = { "function_score": { "filter": {"term": {"tags": "python"}}, @@ -521,7 +523,7 @@ def test_function_score_with_single_function(): assert "doc['comment_count'] * _score" == sf.script -def test_function_score_from_dict(): +def test_function_score_from_dict() -> None: d = { "function_score": { "filter": {"term": {"tags": "python"}}, diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index 657beb05..296553f3 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -27,6 +27,7 @@ import pickle from datetime import date +from typing import Any from pytest import fixture, raises @@ -36,12 +37,12 @@ from opensearchpy.helpers.response.aggs import AggResponse, Bucket, BucketData -@fixture -def agg_response(aggs_search, aggs_data): +@fixture # type: ignore +def agg_response(aggs_search: Any, aggs_data: Any) -> Any: return response.Response(aggs_search, aggs_data) -def test_agg_response_is_pickleable(agg_response) -> None: +def test_agg_response_is_pickleable(agg_response: Any) -> None: agg_response.hits r = pickle.loads(pickle.dumps(agg_response)) @@ -50,7 +51,7 @@ def test_agg_response_is_pickleable(agg_response) -> None: assert r.hits == agg_response.hits -def test_response_is_pickleable(dummy_response) -> None: +def test_response_is_pickleable(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) res.hits r = pickle.loads(pickle.dumps(res)) @@ -60,7 +61,7 @@ def test_response_is_pickleable(dummy_response) -> None: assert r.hits == res.hits -def test_hit_is_pickleable(dummy_response) -> None: +def test_hit_is_pickleable(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) hits = pickle.loads(pickle.dumps(res.hits)) @@ -68,14 +69,14 @@ def test_hit_is_pickleable(dummy_response) -> None: assert hits[0].meta == res.hits[0].meta -def test_response_stores_search(dummy_response) -> None: +def test_response_stores_search(dummy_response: Any) -> None: s = Search() r = response.Response(s, dummy_response) assert r._search is s -def test_interactive_helpers(dummy_response) -> None: +def test_interactive_helpers(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) hits = res.hits h = hits[0] @@ -98,19 +99,19 @@ def test_interactive_helpers(dummy_response) -> None: ] == repr(h) -def 
test_empty_response_is_false(dummy_response) -> None: +def test_empty_response_is_false(dummy_response: Any) -> None: dummy_response["hits"]["hits"] = [] res = response.Response(Search(), dummy_response) assert not res -def test_len_response(dummy_response) -> None: +def test_len_response(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) assert len(res) == 4 -def test_iterating_over_response_gives_you_hits(dummy_response) -> None: +def test_iterating_over_response_gives_you_hits(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) hits = list(h for h in res) @@ -127,7 +128,7 @@ def test_iterating_over_response_gives_you_hits(dummy_response) -> None: assert hits[1].meta.routing == "opensearch" -def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response) -> None: +def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) hits = res.hits @@ -135,7 +136,7 @@ def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response) -> None: assert 12.0 == hits.max_score -def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response) -> None: +def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) h = res.hits[0] @@ -151,30 +152,32 @@ def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response) -> None: h.not_there -def test_slicing_on_response_slices_on_hits(dummy_response) -> None: +def test_slicing_on_response_slices_on_hits(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) assert res[0] is res.hits[0] assert res[::-1] == res.hits[::-1] -def test_aggregation_base(agg_response) -> None: +def test_aggregation_base(agg_response: Any) -> None: assert agg_response.aggs is agg_response.aggregations assert isinstance(agg_response.aggs, response.AggResponse) -def test_metric_agg_works(agg_response) -> None: +def test_metric_agg_works(agg_response: Any) -> None: assert 25052.0 == agg_response.aggs.sum_lines.value -def test_aggregations_can_be_iterated_over(agg_response) -> None: +def test_aggregations_can_be_iterated_over(agg_response: Any) -> None: aggs = [a for a in agg_response.aggs] assert len(aggs) == 3 assert all(map(lambda a: isinstance(a, AggResponse), aggs)) -def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search) -> None: +def test_aggregations_can_be_retrieved_by_name( + agg_response: Any, aggs_search: Any +) -> None: a = agg_response.aggs["popular_files"] assert isinstance(a, BucketData) @@ -182,7 +185,7 @@ def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search) -> Non assert a._meta["aggs"] is aggs_search.aggs.aggs["popular_files"] -def test_bucket_response_can_be_iterated_over(agg_response) -> None: +def test_bucket_response_can_be_iterated_over(agg_response: Any) -> None: popular_files = agg_response.aggregations.popular_files buckets = [b for b in popular_files] @@ -190,7 +193,7 @@ def test_bucket_response_can_be_iterated_over(agg_response) -> None: assert buckets == popular_files.buckets -def test_bucket_keys_get_deserialized(aggs_data, aggs_search) -> None: +def test_bucket_keys_get_deserialized(aggs_data: Any, aggs_search: Any) -> None: class Commit(Document): info = Object(properties={"committed_date": Date()}) diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py index 73d078a9..b44d5dd5 100644 --- 
a/test_opensearchpy/test_helpers/test_search.py +++ b/test_opensearchpy/test_helpers/test_search.py @@ -26,6 +26,7 @@ # under the License. from copy import deepcopy +from typing import Any from pytest import raises @@ -41,16 +42,16 @@ def test_expand__to_dot_is_respected() -> None: def test_execute_uses_cache() -> None: - s = search.Search() - r = object() + s: Any = search.Search() + r: Any = object() s._response = r assert r is s.execute() -def test_cache_can_be_ignored(mock_client) -> None: - s = search.Search(using="mock") - r = object() +def test_cache_can_be_ignored(mock_client: Any) -> None: + s: Any = search.Search(using="mock") + r: Any = object() s._response = r s.execute(ignore_cache=True) @@ -58,27 +59,27 @@ def test_cache_can_be_ignored(mock_client) -> None: def test_iter_iterates_over_hits() -> None: - s = search.Search() + s: Any = search.Search() s._response = [1, 2, 3] assert [1, 2, 3] == list(s) def test_cache_isnt_cloned() -> None: - s = search.Search() + s: Any = search.Search() s._response = object() assert not hasattr(s._clone(), "_response") def test_search_starts_with_no_query() -> None: - s = search.Search() + s: Any = search.Search() assert s.query._proxied is None def test_search_query_combines_query() -> None: - s = search.Search() + s: Any = search.Search() s2 = s.query("match", f=42) assert s2.query._proxied == query.Match(f=42) @@ -90,7 +91,7 @@ def test_search_query_combines_query() -> None: def test_query_can_be_assigned_to() -> None: - s = search.Search() + s: Any = search.Search() q = Q("match", title="python") s.query = q @@ -98,8 +99,8 @@ def test_query_can_be_assigned_to() -> None: assert s.query._proxied is q -def test_query_can_be_wrapped(): - s = search.Search().query("match", title="python") +def test_query_can_be_wrapped() -> None: + s: Any = search.Search().query("match", title="python") s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"}) @@ -114,9 +115,9 @@ def test_query_can_be_wrapped(): def test_using() -> None: - o = object() - o2 = object() - s = search.Search(using=o) + o: Any = object() + o2: Any = object() + s: Any = search.Search(using=o) assert s._using is o s2 = s.using(o2) assert s._using is o @@ -124,27 +125,27 @@ def test_using() -> None: def test_methods_are_proxied_to_the_query() -> None: - s = search.Search().query("match_all") + s: Any = search.Search().query("match_all") assert s.query.to_dict() == {"match_all": {}} def test_query_always_returns_search() -> None: - s = search.Search() + s: Any = search.Search() assert isinstance(s.query("match", f=42), search.Search) def test_source_copied_on_clone() -> None: - s = search.Search().source(False) + s: Any = search.Search().source(False) assert s._clone()._source == s._source assert s._clone()._source is False - s2 = search.Search().source([]) + s2: Any = search.Search().source([]) assert s2._clone()._source == s2._source assert s2._source == [] - s3 = search.Search().source(["some", "fields"]) + s3: Any = search.Search().source(["some", "fields"]) assert s3._clone()._source == s3._source assert s3._clone()._source == ["some", "fields"] @@ -152,15 +153,15 @@ def test_source_copied_on_clone() -> None: def test_copy_clones() -> None: from copy import copy - s1 = search.Search().source(["some", "fields"]) - s2 = copy(s1) + s1: Any = search.Search().source(["some", "fields"]) + s2: Any = copy(s1) assert s1 == s2 assert s1 is not s2 def test_aggs_allow_two_metric() -> None: - s = search.Search() + s: Any = search.Search() s.aggs.metric("a", "max", 
field="a").metric("b", "max", field="b") @@ -169,8 +170,8 @@ def test_aggs_allow_two_metric() -> None: } -def test_aggs_get_copied_on_change(): - s = search.Search().query("match_all") +def test_aggs_get_copied_on_change() -> None: + s: Any = search.Search().query("match_all") s.aggs.bucket("per_tag", "terms", field="f").metric( "max_score", "max", field="score" ) @@ -182,7 +183,7 @@ def test_aggs_get_copied_on_change(): s4 = s3._clone() s4.aggs.metric("max_score", "max", field="score") - d = { + d: Any = { "query": {"match_all": {}}, "aggs": { "per_tag": { @@ -245,7 +246,7 @@ class MyDocument(Document): assert s._doc_type_map == {} -def test_sort(): +def test_sort() -> None: s = search.Search() s = s.sort("fielda", "-fieldb") @@ -267,7 +268,7 @@ def test_sort_by_score() -> None: s.sort("-_score") -def test_collapse(): +def test_collapse() -> None: s = search.Search() inner_hits = {"name": "most_recent", "size": 5, "sort": [{"@timestamp": "desc"}]} @@ -315,7 +316,7 @@ def test_index() -> None: assert {"from": 3, "size": 1} == s[3].to_dict() -def test_search_to_dict(): +def test_search_to_dict() -> None: s = search.Search() assert {} == s.to_dict() @@ -344,7 +345,7 @@ def test_search_to_dict(): assert {"size": 5, "from": 42} == s.to_dict() -def test_complex_example(): +def test_complex_example() -> None: s = search.Search() s = ( s.query("match", title="python") @@ -395,7 +396,7 @@ def test_complex_example(): } == s.to_dict() -def test_reverse(): +def test_reverse() -> None: d = { "query": { "filtered": { @@ -451,7 +452,7 @@ def test_from_dict_doesnt_need_query() -> None: assert {"size": 5} == s.to_dict() -def test_params_being_passed_to_search(mock_client) -> None: +def test_params_being_passed_to_search(mock_client: Any) -> None: s = search.Search(using="mock") s = s.params(routing="42") s.execute() @@ -473,7 +474,7 @@ def test_source() -> None: ).source(["f1", "f2"]).to_dict() -def test_source_on_clone(): +def test_source_on_clone() -> None: assert { "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]}, "query": {"bool": {"filter": [{"term": {"title": "python"}}]}}, @@ -498,7 +499,7 @@ def test_source_on_clear() -> None: ) -def test_suggest_accepts_global_text(): +def test_suggest_accepts_global_text() -> None: s = search.Search.from_dict( { "suggest": { @@ -520,7 +521,7 @@ def test_suggest_accepts_global_text(): } == s.to_dict() -def test_suggest(): +def test_suggest() -> None: s = search.Search() s = s.suggest("my_suggestion", "pyhton", term={"field": "title"}) @@ -542,7 +543,7 @@ def test_exclude() -> None: } == s.to_dict() -def test_delete_by_query(mock_client) -> None: +def test_delete_by_query(mock_client: Any) -> None: s = search.Search(using="mock").query("match", lang="java") s.delete() @@ -551,7 +552,7 @@ def test_delete_by_query(mock_client) -> None: ) -def test_update_from_dict(): +def test_update_from_dict() -> None: s = search.Search() s.update_from_dict({"indices_boost": [{"important-documents": 2}]}) s.update_from_dict({"_source": ["id", "name"]}) @@ -562,7 +563,7 @@ def test_update_from_dict(): } == s.to_dict() -def test_rescore_query_to_dict(): +def test_rescore_query_to_dict() -> None: s = search.Search(index="index-name") positive_query = Q( diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py index 74030874..90e7aa78 100644 --- a/test_opensearchpy/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_helpers/test_update_by_query.py @@ -26,6 +26,7 @@ # under the License. 
from copy import deepcopy +from typing import Any from opensearchpy import Q, UpdateByQuery from opensearchpy.helpers.response import UpdateByQueryResponse @@ -37,7 +38,7 @@ def test_ubq_starts_with_no_query() -> None: assert ubq.query._proxied is None -def test_ubq_to_dict(): +def test_ubq_to_dict() -> None: ubq = UpdateByQuery() assert {} == ubq.to_dict() @@ -53,7 +54,7 @@ def test_ubq_to_dict(): assert {"extra_q": {"term": {"category": "conference"}}} == ubq.to_dict() -def test_complex_example(): +def test_complex_example() -> None: ubq = UpdateByQuery() ubq = ( ubq.query("match", title="python") @@ -104,7 +105,7 @@ def test_exclude() -> None: } == ubq.to_dict() -def test_reverse(): +def test_reverse() -> None: d = { "query": { "filtered": { @@ -146,7 +147,7 @@ def test_from_dict_doesnt_need_query() -> None: assert {"script": {"source": "test"}} == ubq.to_dict() -def test_params_being_passed_to_search(mock_client) -> None: +def test_params_being_passed_to_search(mock_client: Any) -> None: ubq = UpdateByQuery(using="mock") ubq = ubq.params(routing="42") ubq.execute() @@ -156,7 +157,7 @@ def test_params_being_passed_to_search(mock_client) -> None: ) -def test_overwrite_script(): +def test_overwrite_script() -> None: ubq = UpdateByQuery() ubq = ubq.script( source="ctx._source.likes += params.f", lang="painless", params={"f": 3} diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index 358b9184..b6949833 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -55,7 +55,7 @@ class MyAttrDict(utils.AttrDict): assert isinstance(ls[:][0], MyAttrDict) -def test_merge(): +def test_merge() -> None: a = utils.AttrDict({"a": {"b": 42, "c": 47}}) b = {"a": {"b": 123, "d": -12}, "e": [1, 2, 3]} @@ -101,7 +101,7 @@ def test_serializer_deals_with_Attr_versions() -> None: def test_serializer_deals_with_objects_with_to_dict() -> None: class MyClass(object): - def to_dict(self): + def to_dict(self) -> int: return 42 assert serializer.serializer.dumps(MyClass()) == "42" diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index 1565b352..6841f604 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -26,6 +26,7 @@ # under the License. 
from datetime import datetime +from typing import Any from pytest import raises @@ -43,8 +44,8 @@ class Author(InnerDoc): - name = Text(required=True) - email = Text(required=True) + name: Any = Text(required=True) + email: Any = Text(required=True) def clean(self) -> None: print(self, type(self), self.name) @@ -63,7 +64,7 @@ class BlogPostWithStatus(Document): class AutoNowDate(Date): - def clean(self, data): + def clean(self, data: Any) -> Any: if data is None: data = datetime.now() return super(AutoNowDate, self).clean(data) @@ -78,7 +79,7 @@ def test_required_int_can_be_0() -> None: class DT(Document): i = Integer(required=True) - dt = DT(i=0) + dt: Any = DT(i=0) assert dt.full_clean() is None @@ -95,12 +96,12 @@ def test_validation_works_for_lists_of_values() -> None: class DT(Document): i = Date(required=True) - dt = DT(i=[datetime.now(), "not date"]) + dt1: Any = DT(i=[datetime.now(), "not date"]) with raises(ValidationException): - dt.full_clean() + dt1.full_clean() - dt = DT(i=[datetime.now(), datetime.now()]) - assert None is dt.full_clean() + dt2: Any = DT(i=[datetime.now(), datetime.now()]) + assert None is dt2.full_clean() def test_field_with_custom_clean() -> None: @@ -111,29 +112,29 @@ def test_field_with_custom_clean() -> None: def test_empty_object() -> None: - d = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}]) + d: Any = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}]) d.inner = {} d.full_clean() def test_missing_required_field_raises_validation_exception() -> None: - d = BlogPost() + d1: Any = BlogPost() with raises(ValidationException): - d.full_clean() + d1.full_clean() - d = BlogPost() - d.authors.append({"name": "Guian"}) + d2: Any = BlogPost() + d2.authors.append({"name": "Guian"}) with raises(ValidationException): - d.full_clean() + d2.full_clean() - d = BlogPost() - d.authors.append({"name": "Guian", "email": "guiang@bitquilltech.com"}) - d.full_clean() + d3: Any = BlogPost() + d3.authors.append({"name": "Guian", "email": "guiang@bitquilltech.com"}) + d3.full_clean() def test_boolean_doesnt_treat_false_as_empty() -> None: - d = BlogPostWithStatus() + d: Any = BlogPostWithStatus() with raises(ValidationException): d.full_clean() d.published = False @@ -143,7 +144,9 @@ def test_boolean_doesnt_treat_false_as_empty() -> None: def test_custom_validation_on_nested_gets_run() -> None: - d = BlogPost(authors=[Author(name="Guian", email="king@example.com")], created=None) + d: Any = BlogPost( + authors=[Author(name="Guian", email="king@example.com")], created=None + ) assert isinstance(d.authors[0], Author) @@ -152,7 +155,7 @@ def test_custom_validation_on_nested_gets_run() -> None: def test_accessing_known_fields_returns_empty_value() -> None: - d = BlogPost() + d: Any = BlogPost() assert [] == d.authors @@ -162,7 +165,7 @@ def test_accessing_known_fields_returns_empty_value() -> None: def test_empty_values_are_not_serialized() -> None: - d = BlogPost( + d: Any = BlogPost( authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}], created=None ) diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py index 2212b070..37ea76b8 100644 --- a/test_opensearchpy/test_helpers/test_wrappers.py +++ b/test_opensearchpy/test_helpers/test_wrappers.py @@ -26,13 +26,14 @@ # under the License. 
from datetime import datetime, timedelta +from typing import Any import pytest from opensearchpy import Range -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "kwargs, item", [ ({}, 1), @@ -44,11 +45,11 @@ ({"gt": datetime.now() - timedelta(seconds=10)}, datetime.now()), ], ) -def test_range_contains(kwargs, item) -> None: +def test_range_contains(kwargs: Any, item: Any) -> None: assert item in Range(**kwargs) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "kwargs, item", [ ({"gt": -1}, -1), @@ -58,11 +59,11 @@ def test_range_contains(kwargs, item) -> None: ({"lte": datetime.now() - timedelta(seconds=10)}, datetime.now()), ], ) -def test_range_not_contains(kwargs, item): +def test_range_not_contains(kwargs: Any, item: Any) -> None: assert item not in Range(**kwargs) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "args,kwargs", [ (({},), {"lt": 42}), @@ -72,12 +73,12 @@ def test_range_not_contains(kwargs, item): ((), {"gt": 1, "gte": 1}), ], ) -def test_range_raises_value_error_on_wrong_params(args, kwargs) -> None: +def test_range_raises_value_error_on_wrong_params(args: Any, kwargs: Any) -> None: with pytest.raises(ValueError): Range(*args, **kwargs) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "range,lower,inclusive", [ (Range(gt=1), 1, False), @@ -86,11 +87,11 @@ def test_range_raises_value_error_on_wrong_params(args, kwargs) -> None: (Range(lt=42), None, False), ], ) -def test_range_lower(range, lower, inclusive) -> None: +def test_range_lower(range: Any, lower: Any, inclusive: Any) -> None: assert (lower, inclusive) == range.lower -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "range,upper,inclusive", [ (Range(lt=1), 1, False), @@ -99,5 +100,5 @@ def test_range_lower(range, lower, inclusive) -> None: (Range(gt=42), None, False), ], ) -def test_range_upper(range, upper, inclusive) -> None: +def test_range_upper(range: Any, upper: Any, inclusive: Any) -> None: assert (upper, inclusive) == range.upper diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index d7fef3e8..d425fabf 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -30,6 +30,7 @@ import uuid from datetime import datetime from decimal import Decimal +from typing import Any try: import numpy as np @@ -212,7 +213,7 @@ def test_raises_serialization_error_on_dump_error(self) -> None: class TestDeserializer(TestCase): - def setup_method(self, _) -> None: + def setup_method(self, _: Any) -> None: self.de = Deserializer(DEFAULT_SERIALIZERS) def test_deserializes_json_by_default(self) -> None: diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index d3965fed..f6856bc0 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -26,6 +26,7 @@ # under the License. 
+from typing import Any from unittest import SkipTest from opensearchpy.helpers import test @@ -34,7 +35,7 @@ client = None -def get_client(**kwargs): +def get_client(**kwargs: Any) -> Any: global client if client is False: raise SkipTest("No client is available") @@ -66,5 +67,5 @@ def setup_module() -> None: class OpenSearchTestCase(BaseTestCase): @staticmethod - def _get_client(**kwargs): + def _get_client(**kwargs: Any) -> Any: return get_client(**kwargs) diff --git a/test_opensearchpy/test_server/conftest.py b/test_opensearchpy/test_server/conftest.py index 128c33eb..7acd581b 100644 --- a/test_opensearchpy/test_server/conftest.py +++ b/test_opensearchpy/test_server/conftest.py @@ -28,6 +28,7 @@ import os import time +from typing import Any import pytest @@ -40,11 +41,11 @@ # Used for OPENSEARCH_VERSION = "" OPENSEARCH_BUILD_HASH = "" -OPENSEARCH_REST_API_TESTS = [] +OPENSEARCH_REST_API_TESTS: Any = [] -@pytest.fixture(scope="session") -def sync_client_factory(): +@pytest.fixture(scope="session") # type: ignore +def sync_client_factory() -> Any: client = None try: # Configure the client optionally with an HTTP conn class @@ -63,7 +64,7 @@ def sync_client_factory(): # We do this little dance with the URL to force # Requests to respect 'headers: None' within rest API spec tests. client = opensearchpy.OpenSearch( - OPENSEARCH_URL.replace("elastic:changeme@", ""), **kw + OPENSEARCH_URL.replace("elastic:changeme@", ""), **kw # type: ignore ) # Wait for the cluster to report a status of 'yellow' @@ -83,8 +84,8 @@ def sync_client_factory(): client.close() -@pytest.fixture(scope="function") -def sync_client(sync_client_factory): +@pytest.fixture(scope="function") # type: ignore +def sync_client(sync_client_factory: Any) -> Any: try: yield sync_client_factory finally: diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py index 8be79616..35c92e14 100644 --- a/test_opensearchpy/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_server/test_helpers/conftest.py @@ -27,10 +27,10 @@ import re from datetime import datetime +from typing import Any from pytest import fixture -from opensearchpy.client import OpenSearch from opensearchpy.connection.connections import add_connection from opensearchpy.helpers import bulk from opensearchpy.helpers.test import get_test_client @@ -45,32 +45,32 @@ from .test_document import Comment, History, PullRequest, User -@fixture(scope="session") -def client() -> OpenSearch: +@fixture(scope="session") # type: ignore +def client() -> Any: client = get_test_client(verify_certs=False, http_auth=("admin", "admin")) add_connection("default", client) return client -@fixture(scope="session") -def opensearch_version(client): +@fixture(scope="session") # type: ignore +def opensearch_version(client: Any) -> Any: info = client.info() print(info) yield tuple( int(x) - for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") + for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") # type: ignore ) -@fixture -def write_client(client): +@fixture # type: ignore +def write_client(client: Any) -> Any: yield client client.indices.delete("test-*", ignore=404) client.indices.delete_template("test-template", ignore=404) -@fixture(scope="session") -def data_client(client): +@fixture(scope="session") # type: ignore +def data_client(client: Any) -> Any: # create mappings create_git_index(client, "git") create_flat_git_index(client, "flat-git") @@ -82,8 +82,8 @@ def 
data_client(client): client.indices.delete("flat-git", ignore=404) -@fixture -def pull_request(write_client): +@fixture # type: ignore +def pull_request(write_client: Any) -> Any: PullRequest.init() pr = PullRequest( _id=42, @@ -106,8 +106,8 @@ def pull_request(write_client): return pr -@fixture -def setup_ubq_tests(client) -> str: +@fixture # type: ignore +def setup_ubq_tests(client: Any) -> str: index = "test-git" create_git_index(client, index) bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index 7fb8f234..ab5f66e2 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -26,7 +26,7 @@ # under the License. -from typing import Tuple +from typing import Any from mock import patch @@ -40,9 +40,9 @@ class FailingBulkClient(object): def __init__( self, - client, - fail_at: Tuple[int] = (2,), - fail_with=TransportError(599, "Error!", {}), + client: Any, + fail_at: Any = (2,), + fail_with: Any = TransportError(599, "Error!", {}), ) -> None: self.client = client self._called = 0 @@ -50,7 +50,7 @@ def __init__( self.transport = client.transport self._fail_with = fail_with - def bulk(self, *args, **kwargs): + def bulk(self, *args: Any, **kwargs: Any) -> Any: self._called += 1 if self._called in self._fail_at: raise self._fail_with @@ -98,7 +98,7 @@ def test_all_errors_from_chunk_are_raised_on_failure(self) -> None: else: assert False, "exception should have been raised" - def test_different_op_types(self): + def test_different_op_types(self) -> Any: if self.opensearch_version() < (0, 90, 1): raise SkipTest("update supported since 0.90.1") self.client.index(index="i", id=45, body={}) @@ -218,7 +218,7 @@ def test_transport_error_is_raised_with_max_retries(self) -> None: fail_with=TransportError(429, "Rejected!", {}), ) - def streaming_bulk(): + def streaming_bulk() -> Any: results = list( helpers.streaming_bulk( failing_client, @@ -271,7 +271,7 @@ def test_stats_only_reports_numbers(self) -> None: self.assertEqual(0, failed) self.assertEqual(100, self.client.count(index="test-index")["count"]) - def test_errors_are_reported_correctly(self): + def test_errors_are_reported_correctly(self) -> None: self.client.indices.create( "i", { @@ -316,7 +316,7 @@ def test_error_is_raised(self) -> None: index="i", ) - def test_ignore_error_if_raised(self): + def test_ignore_error_if_raised(self) -> None: # ignore the status code 400 in tuple helpers.bulk( self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -349,7 +349,7 @@ def test_ignore_error_if_raised(self): failing_client = FailingBulkClient(self.client) helpers.bulk(failing_client, [{"a": 42}], index="i", ignore_status=(599,)) - def test_errors_are_collected_properly(self): + def test_errors_are_collected_properly(self) -> None: self.client.indices.create( "i", { @@ -384,12 +384,12 @@ class TestScan(OpenSearchTestCase): }, ] - def teardown_method(self, m) -> None: + def teardown_method(self, m: Any) -> None: self.client.transport.perform_request("DELETE", "/_search/scroll/_all") super(TestScan, self).teardown_method(m) - def test_order_can_be_preserved(self): - bulk = [] + def test_order_can_be_preserved(self) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -408,8 +408,8 @@ def 
test_order_can_be_preserved(self): self.assertEqual(list(map(str, range(100))), list(d["_id"] for d in docs)) self.assertEqual(list(range(100)), list(d["_source"]["answer"] for d in docs)) - def test_all_documents_are_read(self): - bulk = [] + def test_all_documents_are_read(self) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -421,8 +421,8 @@ def test_all_documents_are_read(self): self.assertEqual(set(map(str, range(100))), set(d["_id"] for d in docs)) self.assertEqual(set(range(100)), set(d["_source"]["answer"] for d in docs)) - def test_scroll_error(self): - bulk = [] + def test_scroll_error(self) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -456,7 +456,7 @@ def test_scroll_error(self): self.assertEqual(len(data), 3) self.assertEqual(data[-1], {"scroll_data": 42}) - def test_initial_search_error(self): + def test_initial_search_error(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -491,7 +491,7 @@ def test_no_scroll_id_fast_route(self) -> None: client_mock.scroll.assert_not_called() client_mock.clear_scroll.assert_not_called() - def test_scan_auth_kwargs_forwarded(self): + def test_scan_auth_kwargs_forwarded(self) -> None: for key, val in { "api_key": ("name", "value"), "http_auth": ("username", "password"), @@ -510,7 +510,7 @@ def test_scan_auth_kwargs_forwarded(self): } client_mock.clear_scroll.return_value = {} - data = list(helpers.scan(self.client, index="test_index", **{key: val})) + data = list(helpers.scan(self.client, index="test_index", **{key: val})) # type: ignore self.assertEqual(data, [{"search_data": 1}]) @@ -523,7 +523,7 @@ def test_scan_auth_kwargs_forwarded(self): ): self.assertEqual(api_mock.call_args[1][key], val) - def test_scan_auth_kwargs_favor_scroll_kwargs_option(self): + def test_scan_auth_kwargs_favor_scroll_kwargs_option(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "scroll_id", @@ -555,8 +555,8 @@ def test_scan_auth_kwargs_favor_scroll_kwargs_option(self): self.assertEqual(client_mock.scroll.call_args[1]["sort"], "asc") @patch("opensearchpy.helpers.actions.logger") - def test_logger(self, logger_mock): - bulk = [] + def test_logger(self, logger_mock: Any) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -590,8 +590,8 @@ def test_logger(self, logger_mock): pass logger_mock.warning.assert_called() - def test_clear_scroll(self): - bulk = [] + def test_clear_scroll(self) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -617,7 +617,7 @@ def test_clear_scroll(self): ) spy.assert_not_called() - def test_shards_no_skipped_field(self): + def test_shards_no_skipped_field(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -646,8 +646,8 @@ def test_shards_no_skipped_field(self): class TestReindex(OpenSearchTestCase): - def setup_method(self, _): - bulk = [] + def setup_method(self, _: Any) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append( @@ -716,7 +716,7 @@ def test_all_documents_get_moved(self) -> None: class 
TestParentChildReindex(OpenSearchTestCase): - def setup_method(self, _): + def setup_method(self, _: Any) -> None: body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { diff --git a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py index 2da9388a..e965e05b 100644 --- a/test_opensearchpy/test_server/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_server/test_helpers/test_analysis.py @@ -25,10 +25,12 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy import analyzer, token_filter, tokenizer -def test_simulate_with_just__builtin_tokenizer(client) -> None: +def test_simulate_with_just__builtin_tokenizer(client: Any) -> None: a = analyzer("my-analyzer", tokenizer="keyword") tokens = a.simulate("Hello World!", using=client).tokens @@ -36,7 +38,7 @@ def test_simulate_with_just__builtin_tokenizer(client) -> None: assert tokens[0].token == "Hello World!" -def test_simulate_complex(client) -> None: +def test_simulate_complex(client: Any) -> None: a = analyzer( "my-analyzer", tokenizer=tokenizer("split_words", "simple_pattern_split", pattern=":"), @@ -49,7 +51,7 @@ def test_simulate_complex(client) -> None: assert ["this", "works"] == [t.token for t in tokens] -def test_simulate_builtin(client) -> None: +def test_simulate_builtin(client: Any) -> None: a = analyzer("my-analyzer", "english") tokens = a.simulate("fixes running").tokens diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py index 7bf9c27e..65f424d1 100644 --- a/test_opensearchpy/test_server/test_helpers/test_count.py +++ b/test_opensearchpy/test_server/test_helpers/test_count.py @@ -25,15 +25,17 @@ # specific language governing permissions and limitations # under the License. 
+from typing import Any + from opensearchpy.helpers.search import Q, Search -def test_count_all(data_client) -> None: +def test_count_all(data_client: Any) -> None: s = Search(using=data_client).index("git") assert 53 == s.count() -def test_count_prefetch(data_client, mocker) -> None: +def test_count_prefetch(data_client: Any, mocker: Any) -> None: mocker.spy(data_client, "count") search = Search(using=data_client).index("git") @@ -46,7 +48,7 @@ def test_count_prefetch(data_client, mocker) -> None: assert data_client.count.call_count == 1 -def test_count_filter(data_client) -> None: +def test_count_filter(data_client: Any) -> None: s = Search(using=data_client).index("git").filter(~Q("exists", field="parent_shas")) # initial commit + repo document assert 2 == s.count() diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index 63302b7a..11ad915f 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -30,7 +30,7 @@ from typing import Any, Dict -def create_flat_git_index(client, index): +def create_flat_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -73,7 +73,7 @@ def create_flat_git_index(client, index): ) -def create_git_index(client, index): +def create_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -1095,7 +1095,7 @@ def create_git_index(client, index): ] -def flatten_doc(d) -> Dict[str, Any]: +def flatten_doc(d: Any) -> Dict[str, Any]: src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1104,7 +1104,7 @@ def flatten_doc(d) -> Dict[str, Any]: FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d] -def create_test_git_data(d) -> Dict[str, Any]: +def create_test_git_data(d: Any) -> Dict[str, Any]: src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index 0da4b856..ad0bf289 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -27,6 +27,7 @@ from datetime import datetime from ipaddress import ip_address +from typing import Any import pytest from pytest import raises @@ -78,7 +79,7 @@ class Repository(Document): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Any = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -131,7 +132,7 @@ class Index: name = "test-serialization" -def test_serialization(write_client): +def test_serialization(write_client: Any) -> None: SerializationDoc.init() write_client.index( index="test-serialization", @@ -161,7 +162,7 @@ def test_serialization(write_client): } -def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None: +def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: history_query = Q( "nested", path="comments.history", @@ -189,7 +190,7 @@ def test_nested_inner_hits_are_wrapped_properly(pull_request) -> None: assert "score" in history.meta -def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None: 
+def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: s = PullRequest.search().query( "nested", inner_hits={}, @@ -204,7 +205,7 @@ def test_nested_inner_hits_are_deserialized_properly(pull_request) -> None: assert isinstance(pr.comments[0].created_at, datetime) -def test_nested_top_hits_are_wrapped_properly(pull_request) -> None: +def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -216,7 +217,7 @@ def test_nested_top_hits_are_wrapped_properly(pull_request) -> None: assert isinstance(r.aggregations.comments.hits.hits[0], Comment) -def test_update_object_field(write_client) -> None: +def test_update_object_field(write_client: Any) -> None: Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -236,7 +237,7 @@ def test_update_object_field(write_client) -> None: assert w.ranked == {"test1": 0.1, "topic2": 0.2} -def test_update_script(write_client) -> None: +def test_update_script(write_client: Any) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -246,7 +247,7 @@ def test_update_script(write_client) -> None: assert w.views == 47 -def test_update_retry_on_conflict(write_client) -> None: +def test_update_retry_on_conflict(write_client: Any) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -260,8 +261,8 @@ def test_update_retry_on_conflict(write_client) -> None: assert w.views == 52 -@pytest.mark.parametrize("retry_on_conflict", [None, 0]) -def test_update_conflicting_version(write_client, retry_on_conflict) -> None: +@pytest.mark.parametrize("retry_on_conflict", [None, 0]) # type: ignore +def test_update_conflicting_version(write_client: Any, retry_on_conflict: Any) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -278,7 +279,7 @@ def test_update_conflicting_version(write_client, retry_on_conflict) -> None: ) -def test_save_and_update_return_doc_meta(write_client) -> None: +def test_save_and_update_return_doc_meta(write_client: Any) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = w.save(return_doc_meta=True) @@ -302,31 +303,33 @@ def test_save_and_update_return_doc_meta(write_client) -> None: assert resp.keys().__contains__("_version") -def test_init(write_client) -> None: +def test_init(write_client: Any) -> None: Repository.init(index="test-git") assert write_client.indices.exists(index="test-git") -def test_get_raises_404_on_index_missing(data_client) -> None: +def test_get_raises_404_on_index_missing(data_client: Any) -> None: with raises(NotFoundError): Repository.get("opensearch-dsl-php", index="not-there") -def test_get_raises_404_on_non_existent_id(data_client) -> None: +def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: with raises(NotFoundError): Repository.get("opensearch-dsl-php") -def test_get_returns_none_if_404_ignored(data_client) -> None: +def test_get_returns_none_if_404_ignored(data_client: Any) -> None: assert None is Repository.get("opensearch-dsl-php", ignore=404) -def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client) -> None: +def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( + data_client: Any, +) -> None: assert None is Repository.get("42", index="not-there", ignore=404) -def test_get(data_client) -> None: +def test_get(data_client: Any) 
-> None: opensearch_repo = Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -334,15 +337,15 @@ def test_get(data_client) -> None: assert datetime(2014, 3, 3) == opensearch_repo.created_at -def test_exists_return_true(data_client) -> None: +def test_exists_return_true(data_client: Any) -> None: assert Repository.exists("opensearch-py") -def test_exists_false(data_client) -> None: +def test_exists_false(data_client: Any) -> None: assert not Repository.exists("opensearch-dsl-php") -def test_get_with_tz_date(data_client) -> None: +def test_get_with_tz_date(data_client: Any) -> None: first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -354,7 +357,7 @@ def test_get_with_tz_date(data_client) -> None: ) -def test_save_with_tz_date(data_client) -> None: +def test_save_with_tz_date(data_client: Any) -> None: tzinfo = timezone("Europe/Prague") first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -381,7 +384,7 @@ def test_save_with_tz_date(data_client) -> None: ] -def test_mget(data_client) -> None: +def test_mget(data_client: Any) -> None: commits = Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -389,23 +392,23 @@ def test_mget(data_client) -> None: assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -def test_mget_raises_exception_when_missing_param_is_invalid(data_client) -> None: +def test_mget_raises_exception_when_missing_param_is_invalid(data_client: Any) -> None: with raises(ValueError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") -def test_mget_raises_404_when_missing_param_is_raise(data_client) -> None: +def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: with raises(NotFoundError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") -def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client) -> None: +def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client: Any) -> None: commits = Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -def test_update_works_from_search_response(data_client) -> None: +def test_update_works_from_search_response(data_client: Any) -> None: opensearch_repo = Repository.search().execute()[0] opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -416,7 +419,7 @@ def test_update_works_from_search_response(data_client) -> None: assert "opensearch" == new_version.owner.name -def test_update(data_client) -> None: +def test_update(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -440,7 +443,7 @@ def test_update(data_client) -> None: assert "primary_term" in new_version.meta -def test_save_updates_existing_doc(data_client) -> None: +def test_save_updates_existing_doc(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -453,7 +456,7 @@ def test_save_updates_existing_doc(data_client) -> None: assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no -def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> None: +def test_save_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") 
opensearch_repo.meta.seq_no += 1 @@ -461,7 +464,7 @@ def test_save_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo.save() -def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> None: +def test_delete_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -469,13 +472,13 @@ def test_delete_automatically_uses_seq_no_and_primary_term(data_client) -> None: opensearch_repo.delete() -def assert_doc_equals(expected, actual) -> None: +def assert_doc_equals(expected: Any, actual: Any) -> None: for f in expected: assert f in actual assert actual[f] == expected[f] -def test_can_save_to_different_index(write_client): +def test_can_save_to_different_index(write_client: Any) -> None: test_repo = Repository(description="testing", meta={"id": 42}) assert test_repo.save(index="test-document") @@ -490,7 +493,7 @@ def test_can_save_to_different_index(write_client): ) -def test_save_without_skip_empty_will_include_empty_fields(write_client) -> None: +def test_save_without_skip_empty_will_include_empty_fields(write_client: Any) -> None: test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert test_repo.save(index="test-document", skip_empty=False) @@ -505,7 +508,7 @@ def test_save_without_skip_empty_will_include_empty_fields(write_client) -> None ) -def test_delete(write_client) -> None: +def test_delete(write_client: Any) -> None: write_client.create( index="test-document", id="opensearch-py", @@ -526,11 +529,11 @@ def test_delete(write_client) -> None: ) -def test_search(data_client) -> None: +def test_search(data_client: Any) -> None: assert Repository.search().count() == 1 -def test_search_returns_proper_doc_classes(data_client) -> None: +def test_search_returns_proper_doc_classes(data_client: Any) -> None: result = Repository.search().execute() opensearch_repo = result.hits[0] @@ -539,11 +542,13 @@ def test_search_returns_proper_doc_classes(data_client) -> None: assert opensearch_repo.owner.name == "opensearch" -def test_refresh_mapping(data_client) -> None: +def test_refresh_mapping(data_client: Any) -> None: class Commit(Document): class Index: name = "git" + _index: Any + Commit._index.load_mappings() assert "stats" in Commit._index._mapping @@ -553,7 +558,7 @@ class Index: assert isinstance(Commit._index._mapping["committed_date"], Date) -def test_highlight_in_meta(data_client) -> None: +def test_highlight_in_meta(data_client: Any) -> None: commit = ( Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index 4656d4b2..38dd40cd 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -26,6 +26,7 @@ # under the License. 
from datetime import datetime +from typing import Any import pytest @@ -66,8 +67,8 @@ class MetricSearch(FacetedSearch): } -@pytest.fixture(scope="session") -def commit_search_cls(opensearch_version): +@pytest.fixture(scope="session") # type: ignore +def commit_search_cls(opensearch_version: Any) -> Any: interval_kwargs = {"fixed_interval": "1d"} class CommitSearch(FacetedSearch): @@ -91,8 +92,8 @@ class CommitSearch(FacetedSearch): return CommitSearch -@pytest.fixture(scope="session") -def repo_search_cls(opensearch_version): +@pytest.fixture(scope="session") # type: ignore +def repo_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class RepoSearch(FacetedSearch): @@ -105,15 +106,15 @@ class RepoSearch(FacetedSearch): ), } - def search(self): + def search(self) -> Any: s = super(RepoSearch, self).search() return s.filter("term", commit_repo="repo") return RepoSearch -@pytest.fixture(scope="session") -def pr_search_cls(opensearch_version): +@pytest.fixture(scope="session") # type: ignore +def pr_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class PRSearch(FacetedSearch): @@ -131,7 +132,7 @@ class PRSearch(FacetedSearch): return PRSearch -def test_facet_with_custom_metric(data_client) -> None: +def test_facet_with_custom_metric(data_client: Any) -> None: ms = MetricSearch() r = ms.execute() @@ -140,7 +141,7 @@ def test_facet_with_custom_metric(data_client) -> None: assert dates[0] == 1399038439000 -def test_nested_facet(pull_request, pr_search_cls) -> None: +def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: prs = pr_search_cls() r = prs.execute() @@ -148,7 +149,7 @@ def test_nested_facet(pull_request, pr_search_cls) -> None: assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments -def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None: +def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = prs.execute() @@ -160,7 +161,7 @@ def test_nested_facet_with_filter(pull_request, pr_search_cls) -> None: assert not r.hits -def test_datehistogram_facet(data_client, repo_search_cls) -> None: +def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = rs.execute() @@ -168,7 +169,7 @@ def test_datehistogram_facet(data_client, repo_search_cls) -> None: assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created -def test_boolean_facet(data_client, repo_search_cls) -> None: +def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = rs.execute() @@ -179,7 +180,7 @@ def test_boolean_facet(data_client, repo_search_cls) -> None: def test_empty_search_finds_everything( - data_client, opensearch_version, commit_search_cls + data_client: Any, opensearch_version: Any, commit_search_cls: Any ) -> None: cs = commit_search_cls() r = cs.execute() @@ -225,7 +226,7 @@ def test_empty_search_finds_everything( def test_term_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls + data_client: Any, commit_search_cls: Any ) -> None: cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) @@ -271,7 +272,7 @@ def test_term_filters_are_shown_as_selected_and_data_is_filtered( def test_range_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls + data_client: Any, commit_search_cls: Any ) -> None: cs = commit_search_cls(filters={"deletions": 
"better"}) @@ -280,7 +281,7 @@ def test_range_filters_are_shown_as_selected_and_data_is_filtered( assert 19 == r.hits.total.value -def test_pagination(data_client, commit_search_cls) -> None: +def test_pagination(data_client: Any, commit_search_cls: Any) -> None: cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py index 8593459c..71f0501a 100644 --- a/test_opensearchpy/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_server/test_helpers/test_index.py @@ -25,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy import Date, Document, Index, IndexTemplate, Text from opensearchpy.helpers import analysis @@ -34,7 +36,7 @@ class Post(Document): published_from = Date() -def test_index_template_works(write_client) -> None: +def test_index_template_works(write_client: Any) -> None: it = IndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -55,7 +57,7 @@ def test_index_template_works(write_client) -> None: } == write_client.indices.get_mapping(index="test-blog") -def test_index_can_be_saved_even_with_settings(write_client) -> None: +def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: i = Index("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) i.save() @@ -67,12 +69,12 @@ def test_index_can_be_saved_even_with_settings(write_client) -> None: ) -def test_index_exists(data_client) -> None: +def test_index_exists(data_client: Any) -> None: assert Index("git").exists() assert not Index("not-there").exists() -def test_index_can_be_created_with_settings_and_mappings(write_client) -> None: +def test_index_can_be_created_with_settings_and_mappings(write_client: Any) -> None: i = Index("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -97,7 +99,7 @@ def test_index_can_be_created_with_settings_and_mappings(write_client) -> None: } -def test_delete(write_client) -> None: +def test_delete(write_client: Any) -> None: write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -108,7 +110,7 @@ def test_delete(write_client) -> None: assert not write_client.indices.exists(index="test-index") -def test_multiple_indices_with_same_doc_type_work(write_client) -> None: +def test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: i1 = Index("test-index-1", using=write_client) i2 = Index("test-index-2", using=write_client) @@ -116,8 +118,8 @@ def test_multiple_indices_with_same_doc_type_work(write_client) -> None: i.document(Post) i.create() - for i in ("test-index-1", "test-index-2"): - settings = write_client.indices.get_settings(index=i) - assert settings[i]["settings"]["index"]["analysis"] == { + for j in ("test-index-1", "test-index-2"): + settings = write_client.indices.get_settings(index=j) + assert settings[j]["settings"]["index"]["analysis"] == { "analyzer": {"my_analyzer": {"type": "custom", "tokenizer": "keyword"}} } diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py index 50a80dea..722a249e 100644 --- a/test_opensearchpy/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py @@ -25,13 +25,15 @@ # specific language 
governing permissions and limitations # under the License. +from typing import Any + from pytest import raises from opensearchpy import exceptions from opensearchpy.helpers import analysis, mapping -def test_mapping_saved_into_opensearch(write_client) -> None: +def test_mapping_saved_into_opensearch(write_client: Any) -> None: m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -52,7 +54,7 @@ def test_mapping_saved_into_opensearch(write_client) -> None: def test_mapping_saved_into_opensearch_when_index_already_exists_closed( - write_client, + write_client: Any, ) -> None: m = mapping.Mapping() m.field( @@ -77,7 +79,7 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_closed( def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( - write_client, + write_client: Any, ) -> None: m = mapping.Mapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -107,7 +109,7 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( } == write_client.indices.get_mapping(index="test-mapping") -def test_mapping_gets_updated_from_opensearch(write_client): +def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py index 5e45645a..4fb00597 100644 --- a/test_opensearchpy/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_search.py @@ -27,6 +27,8 @@ from __future__ import unicode_literals +from typing import Any + from pytest import raises from opensearchpy import ( @@ -50,7 +52,7 @@ class Repository(Document): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Any = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -62,7 +64,7 @@ class Index: name = "flat-git" -def test_filters_aggregation_buckets_are_accessible(data_client) -> None: +def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -83,7 +85,7 @@ def test_filters_aggregation_buckets_are_accessible(data_client) -> None: ) -def test_top_hits_are_wrapped_in_response(data_client) -> None: +def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -99,7 +101,7 @@ def test_top_hits_are_wrapped_in_response(data_client) -> None: assert isinstance(hits[0], Commit) -def test_inner_hits_are_wrapped_in_response(data_client) -> None: +def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: s = Search(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -110,7 +112,7 @@ def test_inner_hits_are_wrapped_in_response(data_client) -> None: assert repr(commit.meta.inner_hits.repo[0]).startswith("<Hit(git/opensearch): ") -def test_scan_respects_doc_types(data_client) -> None: +def test_scan_respects_doc_types(data_client: Any) -> None: repos = list(Repository.search().scan()) assert 1 == len(repos) @@ -118,7 +120,7 @@ def test_scan_respects_doc_types(data_client) -> None: assert repos[0].organization == "opensearch" -def test_scan_iterates_through_all_docs(data_client) -> None:
+def test_scan_iterates_through_all_docs(data_client: Any) -> None: s = Search(index="flat-git") commits = list(s.scan()) @@ -127,7 +129,7 @@ def test_scan_iterates_through_all_docs(data_client) -> None: assert {d["_id"] for d in FLAT_DATA} == {c.meta.id for c in commits} -def test_response_is_cached(data_client) -> None: +def test_response_is_cached(data_client: Any) -> None: s = Repository.search() repos = list(s) @@ -135,7 +137,7 @@ def test_response_is_cached(data_client) -> None: assert s._response.hits == repos -def test_multi_search(data_client) -> None: +def test_multi_search(data_client: Any) -> None: s1 = Repository.search() s2 = Search(index="flat-git") @@ -152,7 +154,7 @@ def test_multi_search(data_client) -> None: assert r2._search is s2 -def test_multi_missing(data_client) -> None: +def test_multi_missing(data_client: Any) -> None: s1 = Repository.search() s2 = Search(index="flat-git") s3 = Search(index="does_not_exist") @@ -175,7 +177,7 @@ def test_multi_missing(data_client) -> None: assert r3 is None -def test_raw_subfield_can_be_used_in_aggs(data_client) -> None: +def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None: s = Search(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) diff --git a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py index fb46e956..dfc4d250 100644 --- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py @@ -25,11 +25,13 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy.helpers.search import Q from opensearchpy.helpers.update_by_query import UpdateByQuery -def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None: +def test_update_by_query_no_script(write_client: Any, setup_ubq_tests: Any) -> None: index = setup_ubq_tests ubq = ( @@ -48,7 +50,7 @@ def test_update_by_query_no_script(write_client, setup_ubq_tests) -> None: assert response.success() -def test_update_by_query_with_script(write_client, setup_ubq_tests) -> None: +def test_update_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: index = setup_ubq_tests ubq = ( @@ -65,7 +67,7 @@ def test_update_by_query_with_script(write_client, setup_ubq_tests) -> None: assert response.version_conflicts == 0 -def test_delete_by_query_with_script(write_client, setup_ubq_tests) -> None: +def test_delete_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py index d127edb1..aa1eaf6c 100644 --- a/test_opensearchpy/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py @@ -23,7 +23,7 @@ class TestAlertingPlugin(OpenSearchTestCase): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_create_destination(self): + def test_create_destination(self) -> None: # Test to create alert destination dummy_destination = { "name": "my-destination", @@ -54,7 +54,7 @@ def test_get_destination(self) -> None: (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_create_monitor(self): + def test_create_monitor(self) -> None: # Create a 
dummy destination self.test_create_destination() diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index ba16d044..3249f41b 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -36,6 +36,7 @@ import re import warnings import zipfile +from typing import Any import pytest import urllib3 @@ -142,23 +143,23 @@ class YamlRunner: - def __init__(self, client) -> None: + def __init__(self, client: Any) -> None: self.client = client - self.last_response = None + self.last_response: Any = None - self._run_code = None - self._setup_code = None - self._teardown_code = None - self._state = {} + self._run_code: Any = None + self._setup_code: Any = None + self._teardown_code: Any = None + self._state: Any = {} - def use_spec(self, test_spec) -> None: + def use_spec(self, test_spec: Any) -> None: self._setup_code = test_spec.pop("setup", None) self._run_code = test_spec.pop("run", None) self._teardown_code = test_spec.pop("teardown", None) - def setup(self): + def setup(self) -> Any: # Pull skips from individual tests to not do unnecessary setup. - skip_code = [] + skip_code: Any = [] for action in self._run_code: assert len(action) == 1 action_type, _ = list(action.items())[0] @@ -174,12 +175,12 @@ def setup(self): if self._setup_code: self.run_code(self._setup_code) - def teardown(self) -> None: + def teardown(self) -> Any: if self._teardown_code: self.section("teardown") self.run_code(self._teardown_code) - def opensearch_version(self): + def opensearch_version(self) -> Any: global OPENSEARCH_VERSION if OPENSEARCH_VERSION is None: version_string = (self.client.info())["version"]["number"] @@ -189,10 +190,10 @@ def opensearch_version(self): OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 99 for v in version) return OPENSEARCH_VERSION - def section(self, name) -> None: + def section(self, name: str) -> None: print(("=" * 10) + " " + name + " " + ("=" * 10)) - def run(self) -> None: + def run(self) -> Any: try: self.setup() self.section("test") @@ -203,7 +204,7 @@ def run(self) -> None: except Exception: pass - def run_code(self, test) -> None: + def run_code(self, test: Any) -> Any: """Execute an instruction based on its type.""" for action in test: assert len(action) == 1 @@ -215,7 +216,7 @@ def run_code(self, test) -> None: else: raise RuntimeError("Invalid action type %r" % (action_type,)) - def run_do(self, action) -> None: + def run_do(self, action: Any) -> Any: api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) @@ -267,7 +268,7 @@ def run_do(self, action) -> None: # Filter out warnings raised by other components. caught_warnings = [ - str(w.message) + str(w.message) # type: ignore for w in caught_warnings if w.category == OpenSearchWarning and str(w.message) not in allowed_warnings @@ -275,13 +276,13 @@ def run_do(self, action) -> None: # Sorting removes the issue with order raised. We only care about # if all warnings are raised in the single API call. 
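# (Illustrative aside, not part of this patch: sorting both sides makes the
# warning comparison order-insensitive, so the check below only fails when
# the set of warnings differs, never because of emission order. For example:)
assert sorted(["b", "a"]) == sorted(["a", "b"])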
- if warn and sorted(warn) != sorted(caught_warnings): + if warn and sorted(warn) != sorted(caught_warnings): # type: ignore raise AssertionError( "Expected warnings not equal to actual warnings: expected=%r actual=%r" % (warn, caught_warnings) ) - def run_catch(self, catch, exception) -> None: + def run_catch(self, catch: Any, exception: Any) -> None: if catch == "param": assert isinstance(exception, TypeError) return @@ -296,7 +297,7 @@ def run_catch(self, catch, exception) -> None: ) is not None self.last_response = exception.info - def run_skip(self, skip) -> None: + def run_skip(self, skip: Any) -> Any: global IMPLEMENTED_FEATURES if "features" in skip: @@ -318,32 +319,32 @@ def run_skip(self, skip) -> None: if min_version <= (self.opensearch_version()) <= max_version: pytest.skip(reason) - def run_gt(self, action) -> None: + def run_gt(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) > value - def run_gte(self, action) -> None: + def run_gte(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) >= value - def run_lt(self, action) -> None: + def run_lt(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) < value - def run_lte(self, action) -> None: + def run_lte(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) <= value - def run_set(self, action) -> None: + def run_set(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) self._state[value] = self._lookup(key) - def run_is_false(self, action) -> None: + def run_is_false(self, action: Any) -> None: try: value = self._lookup(action) except AssertionError: @@ -351,23 +352,23 @@ def run_is_false(self, action) -> None: else: assert value in FALSEY_VALUES - def run_is_true(self, action) -> None: + def run_is_true(self, action: Any) -> None: value = self._lookup(action) assert value not in FALSEY_VALUES - def run_length(self, action) -> None: + def run_length(self, action: Any) -> None: for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) assert expected == len(value) - def run_match(self, action) -> None: + def run_match(self, action: Any) -> None: for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) if ( - isinstance(expected, string_types) + isinstance(expected, str) and expected.startswith("/") and expected.endswith("/") ): @@ -379,7 +380,7 @@ def run_match(self, action) -> None: else: self._assert_match_equals(value, expected) - def run_contains(self, action) -> None: + def run_contains(self, action: Any) -> None: for path, expected in action.items(): value = self._lookup(path) # list[dict[str,str]] is returned expected = self._resolve(expected) # dict[str, str] @@ -387,7 +388,7 @@ def run_contains(self, action) -> None: if expected not in value: raise AssertionError("%s is not contained by %s" % (expected, value)) - def run_transform_and_set(self, action) -> None: + def run_transform_and_set(self, action: Any) -> None: for key, value in action.items(): # Convert #base64EncodeCredentials(id,api_key) to ["id", "api_key"] if "#base64EncodeCredentials" in value: @@ -397,7 +398,7 @@ def run_transform_and_set(self, action) -> None: (self._lookup(value[0]), self._lookup(value[1])) ) - def _resolve(self, value): + def _resolve(self, value: Any) -> Any: # 
resolve variables if isinstance(value, string_types) and "$" in value: for k, v in self._state.items(): @@ -422,12 +423,13 @@ def _resolve(self, value): value = list(map(self._resolve, value)) return value - def _lookup(self, path): + def _lookup(self, path: str) -> Any: # fetch the possibly nested value from last_response - value = self.last_response + value: Any = self.last_response if path == "$body": return value path = path.replace(r"\.", "\1") + step: Any for step in path.split("."): if not step: continue @@ -449,10 +451,10 @@ def _lookup(self, path): value = value[step] return value - def _feature_enabled(self, name) -> bool: + def _feature_enabled(self, name: str) -> Any: return False - def _assert_match_equals(self, a, b) -> None: + def _assert_match_equals(self, a: Any, b: Any) -> None: # Handle for large floating points with 'E' if isinstance(b, string_types) and isinstance(a, float) and "e" in repr(a): a = repr(a).replace("e+", "E") @@ -460,8 +462,8 @@ def _assert_match_equals(self, a, b) -> None: assert a == b, "%r does not match %r" % (a, b) -@pytest.fixture(scope="function") -def sync_runner(sync_client): +@pytest.fixture(scope="function") # type: ignore +def sync_runner(sync_client: Any) -> Any: return YamlRunner(sync_client) @@ -532,8 +534,8 @@ def sync_runner(sync_client): if not RUN_ASYNC_REST_API_TESTS: - @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) - def test_rest_api_spec(test_spec, sync_runner) -> None: + @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) # type: ignore + def test_rest_api_spec(test_spec: Any, sync_runner: Any) -> None: if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") sync_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index 5c719953..e43b2278 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -114,7 +114,7 @@ def test_create_user_with_body_param_empty(self) -> None: else: assert False - def test_create_user_with_role(self): + def test_create_user_with_role(self) -> None: self.test_create_role() # Test to create user diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index a69a7cf0..dc1a8f9e 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -30,6 +30,7 @@ import json import time +from typing import Any from mock import patch @@ -42,14 +43,14 @@ class DummyConnection(Connection): - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: Any) -> None: self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) - self.calls = [] + self.calls: Any = [] super(DummyConnection, self).__init__(**kwargs) - def perform_request(self, *args, **kwargs): + def perform_request(self, *args: Any, **kwargs: Any) -> Any: self.calls.append((args, kwargs)) if self.exception: raise self.exception @@ -119,20 +120,20 @@ def test_cluster_manager_only_nodes_are_ignored(self) -> None: chosen = [ i for i, node_info in enumerate(nodes) - if get_host_info(node_info, i) is not None + if get_host_info(node_info, i) is not None # type: ignore ] self.assertEqual([1, 2, 3, 4], chosen) class TestTransport(TestCase): def test_single_connection_uses_dummy_connection_pool(self) -> None: - t = Transport([{}]) - 
self.assertIsInstance(t.connection_pool, DummyConnectionPool) - t = Transport([{"host": "localhost"}]) - self.assertIsInstance(t.connection_pool, DummyConnectionPool) + t1: Any = Transport([{}]) + self.assertIsInstance(t1.connection_pool, DummyConnectionPool) + t2: Any = Transport([{"host": "localhost"}]) + self.assertIsInstance(t2.connection_pool, DummyConnectionPool) def test_request_timeout_extracted_from_params_and_passed(self) -> None: - t = Transport([{}], connection_class=DummyConnection) + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"request_timeout": 42}) self.assertEqual(1, len(t.get_connection().calls)) @@ -143,7 +144,7 @@ def test_request_timeout_extracted_from_params_and_passed(self) -> None: ) def test_timeout_extracted_from_params_and_passed(self) -> None: - t = Transport([{}], connection_class=DummyConnection) + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"timeout": 84}) self.assertEqual(1, len(t.get_connection().calls)) @@ -154,7 +155,7 @@ def test_timeout_extracted_from_params_and_passed(self) -> None: ) def test_opaque_id(self) -> None: - t = Transport([{}], opaque_id="app-1", connection_class=DummyConnection) + t: Any = Transport([{}], opaque_id="app-1", connection_class=DummyConnection) t.perform_request("GET", "/") self.assertEqual(1, len(t.get_connection().calls)) @@ -174,7 +175,7 @@ def test_opaque_id(self) -> None: ) def test_request_with_custom_user_agent_header(self) -> None: - t = Transport([{}], connection_class=DummyConnection) + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", headers={"user-agent": "my-custom-value/1.2.3"}) self.assertEqual(1, len(t.get_connection().calls)) @@ -188,7 +189,9 @@ def test_request_with_custom_user_agent_header(self) -> None: ) def test_send_get_body_as_source(self) -> None: - t = Transport([{}], send_get_body_as="source", connection_class=DummyConnection) + t: Any = Transport( + [{}], send_get_body_as="source", connection_class=DummyConnection + ) t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) @@ -197,14 +200,16 @@ def test_send_get_body_as_source(self) -> None: ) def test_send_get_body_as_post(self) -> None: - t = Transport([{}], send_get_body_as="POST", connection_class=DummyConnection) + t: Any = Transport( + [{}], send_get_body_as="POST", connection_class=DummyConnection + ) t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("POST", "/", None, b"{}"), t.get_connection().calls[0][0]) def test_body_gets_encoded_into_bytes(self) -> None: - t = Transport([{}], connection_class=DummyConnection) + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好") self.assertEqual(1, len(t.get_connection().calls)) @@ -214,7 +219,7 @@ def test_body_gets_encoded_into_bytes(self) -> None: ) def test_body_bytes_get_passed_untouched(self) -> None: - t = Transport([{}], connection_class=DummyConnection) + t: Any = Transport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" t.perform_request("GET", "/", body=body) @@ -222,7 +227,7 @@ def test_body_bytes_get_passed_untouched(self) -> None: self.assertEqual(("GET", "/", None, body), t.get_connection().calls[0][0]) def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: - t = Transport([{}], connection_class=DummyConnection) + t: Any = Transport([{}], 
connection_class=DummyConnection) t.perform_request("GET", "/", body="你好\uda6a") self.assertEqual(1, len(t.get_connection().calls)) @@ -232,26 +237,26 @@ def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: ) def test_kwargs_passed_on_to_connections(self) -> None: - t = Transport([{"host": "google.com"}], port=123) + t: Any = Transport([{"host": "google.com"}], port=123) self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://google.com:123", t.connection_pool.connections[0].host) def test_kwargs_passed_on_to_connection_pool(self) -> None: dt = object() - t = Transport([{}, {}], dead_timeout=dt) + t: Any = Transport([{}, {}], dead_timeout=dt) self.assertIs(dt, t.connection_pool.dead_timeout) def test_custom_connection_class(self) -> None: - class MyConnection(object): - def __init__(self, **kwargs): + class MyConnection(Connection): + def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs - t = Transport([{}], connection_class=MyConnection) + t: Any = Transport([{}], connection_class=MyConnection) self.assertEqual(1, len(t.connection_pool.connections)) self.assertIsInstance(t.connection_pool.connections[0], MyConnection) def test_add_connection(self) -> None: - t = Transport([{}], randomize_hosts=False) + t: Any = Transport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) self.assertEqual(2, len(t.connection_pool.connections)) @@ -260,7 +265,7 @@ def test_add_connection(self) -> None: ) def test_request_will_fail_after_X_retries(self) -> None: - t = Transport( + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, ) @@ -269,7 +274,7 @@ def test_request_will_fail_after_X_retries(self) -> None: self.assertEqual(4, len(t.get_connection().calls)) def test_failed_connection_will_be_marked_as_dead(self) -> None: - t = Transport( + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, ) @@ -279,7 +284,7 @@ def test_failed_connection_will_be_marked_as_dead(self) -> None: def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: for method in ("GET", "HEAD"): - t = Transport([{}, {}], connection_class=DummyConnection) + t: Any = Transport([{}, {}], connection_class=DummyConnection) con1 = t.connection_pool.get_connection() con2 = t.connection_pool.get_connection() t.connection_pool.mark_dead(con1) @@ -290,7 +295,7 @@ def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: self.assertEqual(1, len(t.connection_pool.dead_count)) def test_sniff_will_use_seed_connections(self) -> None: - t = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) + t: Any = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) t.set_connections([{"data": "invalid"}]) t.sniff_hosts() @@ -298,7 +303,7 @@ def test_sniff_will_use_seed_connections(self) -> None: self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: - t = Transport( + t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, @@ -307,7 +312,7 @@ def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_ignores_sniff_timeout(self) -> None: - t = Transport( + t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, @@ -319,7 +324,7 @@ def 
test_sniff_on_start_ignores_sniff_timeout(self) -> None: ) def test_sniff_uses_sniff_timeout(self) -> None: - t = Transport( + t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_timeout=42, @@ -330,8 +335,8 @@ def test_sniff_uses_sniff_timeout(self) -> None: t.seed_connections[0].calls[0], ) - def test_sniff_reuses_connection_instances_if_possible(self): - t = Transport( + def test_sniff_reuses_connection_instances_if_possible(self) -> None: + t: Any = Transport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, randomize_hosts=False, @@ -342,8 +347,8 @@ def test_sniff_reuses_connection_instances_if_possible(self): self.assertEqual(1, len(t.connection_pool.connections)) self.assertIs(connection, t.get_connection()) - def test_sniff_on_fail_triggers_sniffing_on_fail(self): - t = Transport( + def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, @@ -356,9 +361,11 @@ def test_sniff_on_fail_triggers_sniffing_on_fail(self): self.assertEqual("http://1.1.1.1:123", t.get_connection().host) @patch("opensearchpy.transport.Transport.sniff_hosts") - def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts): + def test_sniff_on_fail_failing_does_not_prevent_retires( + self, sniff_hosts: Any + ) -> None: sniff_hosts.side_effect = [TransportError("sniff failed")] - t = Transport( + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, @@ -374,7 +381,7 @@ def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts): self.assertEqual(1, len(conn_data.calls)) def test_sniff_after_n_seconds(self) -> None: - t = Transport( + t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniffer_timeout=5, @@ -394,7 +401,7 @@ def test_sniff_after_n_seconds(self) -> None: def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
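# (Illustrative aside, not part of this patch: a hypothetical split of the
# "fqdn/ip:port" publish_address shape this test exercises.)
address = "node1.example.com/1.1.1.1:123"   # hypothetical sample value
fqdn, _, hostport = address.partition("/")  # "node1.example.com", "1.1.1.1:123"
host, _, port = hostport.rpartition(":")    # "1.1.1.1", "123"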
- t = Transport( + t: Any = Transport( [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py index 5aa4983b..50682d35 100644 --- a/test_opensearchpy/utils.py +++ b/test_opensearchpy/utils.py @@ -27,11 +27,12 @@ import time +from typing import Any from opensearchpy import OpenSearch -def wipe_cluster(client) -> None: +def wipe_cluster(client: Any) -> None: """Wipes a cluster clean between test cases""" close_after_wipe = False try: @@ -59,9 +60,9 @@ def wipe_cluster(client) -> None: client.close() -def wipe_cluster_settings(client) -> None: +def wipe_cluster_settings(client: Any) -> None: settings = client.cluster.get_settings() - new_settings = {} + new_settings: Any = {} for name, value in settings.items(): if value: new_settings.setdefault(name, {}) @@ -71,7 +72,7 @@ def wipe_cluster_settings(client) -> None: client.cluster.put_settings(body=new_settings) -def wipe_snapshots(client): +def wipe_snapshots(client: Any) -> None: """Deletes all the snapshots and repositories from the cluster""" in_progress_snapshots = [] @@ -96,14 +97,14 @@ def wipe_snapshots(client): assert in_progress_snapshots == [] -def wipe_data_streams(client) -> None: +def wipe_data_streams(client: Any) -> None: try: client.indices.delete_data_stream(name="*", expand_wildcards="all") except Exception: client.indices.delete_data_stream(name="*") -def wipe_indices(client) -> None: +def wipe_indices(client: Any) -> None: client.indices.delete( index="*,-.ds-ilm-history-*", expand_wildcards="all", @@ -111,7 +112,7 @@ def wipe_indices(client) -> None: ) -def wipe_searchable_snapshot_indices(client) -> None: +def wipe_searchable_snapshot_indices(client: Any) -> None: cluster_metadata = client.cluster.state( metric="metadata", filter_path="metadata.indices.*.settings.index.store.snapshot", @@ -121,17 +122,17 @@ def wipe_searchable_snapshot_indices(client) -> None: client.indices.delete(index=index) -def wipe_slm_policies(client) -> None: +def wipe_slm_policies(client: Any) -> None: for policy in client.slm.get_lifecycle(): client.slm.delete_lifecycle(policy_id=policy["name"]) -def wipe_auto_follow_patterns(client) -> None: +def wipe_auto_follow_patterns(client: Any) -> None: for pattern in client.ccr.get_auto_follow_pattern()["patterns"]: client.ccr.delete_auto_follow_pattern(name=pattern["name"]) -def wipe_node_shutdown_metadata(client) -> None: +def wipe_node_shutdown_metadata(client: Any) -> None: shutdown_status = client.shutdown.get_node() # If response contains these two keys the feature flag isn't enabled # on this cluster so skip this step now. 
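# (Illustrative aside, not part of this patch: the wait_for_* helpers that
# follow all share one deadline-polling shape; a minimal, hypothetical
# version of that shape looks like this.)
import time
from typing import Any, Callable

def wait_until(condition: Callable[[], Any], timeout: int = 30) -> bool:
    # Poll until the condition is truthy or the deadline passes.
    end_time = time.time() + timeout
    while time.time() < end_time:
        if condition():
            return True
    return False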
@@ -143,14 +144,14 @@ def wipe_node_shutdown_metadata(client) -> None: client.shutdown.delete_node(node_id=node_id) -def wipe_tasks(client) -> None: +def wipe_tasks(client: Any) -> None: tasks = client.tasks.list() for node_name, node in tasks.get("node", {}).items(): for task_id in node.get("tasks", ()): client.tasks.cancel(task_id=task_id, wait_for_completion=True) -def wait_for_pending_tasks(client, filter, timeout: int = 30) -> None: +def wait_for_pending_tasks(client: Any, filter: Any, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: tasks = client.cat.tasks(detailed=True).split("\n") @@ -158,7 +159,7 @@ def wait_for_pending_tasks(client, filter, timeout: int = 30) -> None: break -def wait_for_pending_datafeeds_and_jobs(client, timeout: int = 30) -> None: +def wait_for_pending_datafeeds_and_jobs(client: Any, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: if ( @@ -171,7 +172,7 @@ def wait_for_pending_datafeeds_and_jobs(client, timeout: int = 30) -> None: break -def wait_for_cluster_state_updates_to_finish(client, timeout: int = 30) -> None: +def wait_for_cluster_state_updates_to_finish(client: Any, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: if not client.cluster.pending_tasks().get("tasks", ()): diff --git a/utils/build-dists.py b/utils/build-dists.py index b45da98e..569ed7ea 100644 --- a/utils/build-dists.py +++ b/utils/build-dists.py @@ -38,13 +38,14 @@ import shutil import sys import tempfile +from typing import Any base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) tmp_dir = None -@contextlib.contextmanager -def set_tmp_dir(): +@contextlib.contextmanager # type: ignore +def set_tmp_dir() -> None: global tmp_dir tmp_dir = tempfile.mkdtemp() yield tmp_dir @@ -52,7 +53,7 @@ def set_tmp_dir(): tmp_dir = None -def run(*argv, expect_exit_code: int = 0) -> None: +def run(*argv: Any, expect_exit_code: int = 0) -> None: global tmp_dir if tmp_dir is None: os.chdir(base_dir) @@ -70,9 +71,9 @@ def run(*argv, expect_exit_code: int = 0) -> None: exit(exit_code or 1) -def test_dist(dist) -> None: - with set_tmp_dir() as tmp_dir: - dist_name = re.match( +def test_dist(dist: Any) -> None: + with set_tmp_dir() as tmp_dir: # type: ignore + dist_name = re.match( # type: ignore r"^(opensearchpy\d*)-", os.path.basename(dist) .replace("opensearch-py", "opensearchpy") @@ -216,7 +217,7 @@ def main() -> None: # alpha/beta/rc -> aN/bN/rcN else: pre_number = re.search(r"-(a|b|rc)(?:lpha|eta|)(\d+)$", expect_version) - version = version + pre_number.group(1) + pre_number.group(2) + version = version + pre_number.group(1) + pre_number.group(2) # type: ignore expect_version = re.sub( r"(?:-(?:SNAPSHOT|alpha\d+|beta\d+|rc\d+))+$", "", expect_version diff --git a/utils/generate-api.py b/utils/generate-api.py index f53e212c..792446dd 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -37,6 +37,7 @@ from itertools import chain, groupby from operator import itemgetter from pathlib import Path +from typing import Any, Dict import black import deepmerge @@ -78,27 +79,27 @@ ) -def blacken(filename) -> None: +def blacken(filename: Any) -> None: runner = CliRunner() result = runner.invoke(black.main, [str(filename)]) assert result.exit_code == 0, result.output @lru_cache() -def is_valid_url(url): +def is_valid_url(url: str) -> bool: return 200 <= http.request("HEAD", url).status < 400 class Module: - def __init__(self, namespace) -> None: - self.namespace 
= namespace - self._apis = [] + def __init__(self, namespace: str) -> None: + self.namespace: Any = namespace + self._apis: Any = [] self.parse_orig() - def add(self, api) -> None: + def add(self, api: Any) -> None: self._apis.append(api) - def parse_orig(self): + def parse_orig(self) -> None: self.orders = [] self.header = "from typing import Any, Collection, Optional, Tuple, Union\n\n" @@ -129,7 +130,7 @@ def parse_orig(self): r"\n (?:async )?def ([a-z_]+)\(", content, re.MULTILINE ) - def _position(self, api): + def _position(self, api: Any) -> Any: try: return self.orders.index(api.name) except ValueError: @@ -234,12 +235,12 @@ def dump(self) -> None: f.write(file_content) @property - def filepath(self): + def filepath(self) -> Any: return CODE_ROOT / f"opensearchpy/_async/client/{self.namespace}.py" class API: - def __init__(self, namespace, name, definition) -> None: + def __init__(self, namespace: str, name: str, definition: Any) -> None: self.namespace = namespace self.name = name @@ -284,7 +285,7 @@ def __init__(self, namespace, name, definition) -> None: print(f"URL {revised_url!r}, falling back on {self.doc_url!r}") @property - def all_parts(self): + def all_parts(self) -> Dict[str, str]: parts = {} for url in self._def["url"]["paths"]: parts.update(url.get("parts", {})) @@ -309,7 +310,7 @@ def all_parts(self): dynamic, components = self.url_parts - def ind(item): + def ind(item: Any) -> Any: try: return components.index(item[0]) except ValueError: @@ -319,29 +320,29 @@ def ind(item): return parts @property - def params(self): + def params(self) -> Any: parts = self.all_parts params = self._def.get("params", {}) return chain( - ((p, parts[p]) for p in parts if parts[p]["required"]), + ((p, parts[p]) for p in parts if parts[p]["required"]), # type: ignore (("body", self.body),) if self.body else (), ( (p, parts[p]) for p in parts - if not parts[p]["required"] and p not in params + if not parts[p]["required"] and p not in params # type: ignore ), sorted(params.items(), key=lambda x: (x[0] not in parts, x[0])), ) @property - def body(self): + def body(self) -> Any: b = self._def.get("body", {}) if b: b.setdefault("required", False) return b @property - def query_params(self): + def query_params(self) -> Any: return ( k for k in sorted(self._def.get("params", {}).keys()) @@ -349,7 +350,7 @@ def query_params(self): ) @property - def all_func_params(self): + def all_func_params(self) -> Any: """Parameters that will be in the '@query_params' decorator list and parameters that will be in the function signature. This doesn't include @@ -362,14 +363,14 @@ def all_func_params(self): return params @property - def path(self): + def path(self) -> Any: return max( (path for path in self._def["url"]["paths"]), key=lambda p: len(re.findall(r"\{([^}]+)\}", p["path"])), ) @property - def method(self): + def method(self) -> Any: # To adhere to the HTTP RFC we shouldn't send # bodies in GET requests. 
default_method = self.path["methods"][0] @@ -382,7 +383,7 @@ def method(self): return default_method @property - def url_parts(self): + def url_parts(self) -> Any: path = self.path["path"] dynamic = "{" in path @@ -403,14 +404,14 @@ def url_parts(self): return dynamic, parts @property - def required_parts(self): + def required_parts(self) -> Any: parts = self.all_parts - required = [p for p in parts if parts[p]["required"]] + required = [p for p in parts if parts[p]["required"]] # type: ignore if self.body.get("required"): required.append("body") return required - def to_python(self): + def to_python(self) -> Any: try: t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") except TemplateNotFound: @@ -423,7 +424,7 @@ def to_python(self): ) -def read_modules(): +def read_modules() -> Any: modules = {} # Load the OpenAPI specification file @@ -596,8 +597,8 @@ def read_modules(): if "POST" in methods or "PUT" in methods: api.update( { - "stability": "stable", - "visibility": "public", + "stability": "stable", # type: ignore + "visibility": "public", # type: ignore "headers": { "accept": ["application/json"], "content_type": ["application/json"], @@ -607,8 +608,8 @@ def read_modules(): else: api.update( { - "stability": "stable", - "visibility": "public", + "stability": "stable", # type: ignore + "visibility": "public", # type: ignore "headers": {"accept": ["application/json"]}, } ) @@ -641,7 +642,7 @@ def read_modules(): return modules -def apply_patch(namespace, name, api): +def apply_patch(namespace: str, name: str, api: Any) -> Any: override_file_path = ( CODE_ROOT / "utils/templates/overrides" / namespace / f"{name}.json" ) @@ -652,7 +653,7 @@ def apply_patch(namespace, name, api): return api -def dump_modules(modules): +def dump_modules(modules: Any) -> None: for mod in modules.values(): mod.dump() From 09c739412ecb888473019e14c63bdf8a7e82be4b Mon Sep 17 00:00:00 2001 From: Sandor Nemes Date: Fri, 10 Nov 2023 00:52:10 +0100 Subject: [PATCH 43/80] Set enable_cleanup_closed=True to drop TLS connections without a shutdown (#468) * Set enable_cleanup_closed=True to drop TLS connections without a shutdown AsyncOpenSearch seems to leak TLS connections due to a missing parameter in `aiohttp.TCPConnector`. This causes #172 and was also fixed "upstream" in this issue https://github.com/elastic/elasticsearch-py/issues/1910. Signed-off-by: Sandor Nemes * Update CHANGELOG.md Signed-off-by: Sandor Nemes --------- Signed-off-by: Sandor Nemes Signed-off-by: Daniel (dB.) Doubrovkine Co-authored-by: Daniel (dB.) 
Doubrovkine Signed-off-by: roma2023 --- CHANGELOG.md | 3 ++- opensearchpy/_async/http_aiohttp.py | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 96ce97dc..b8dafb82 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Use API generator for all APIs ([#551](https://github.com/opensearch-project/opensearch-py/pull/551)) - Merge `.pyi` type stubs inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) - Expanded type coverage to benchmarks, samples and tests ([#566](https://github.com/opensearch-project/opensearch-py/pull/566)) +- Defaulted `enable_cleanup_closed=True` in `aiohttp.TCPConnector` to prevent TLS connection leaks ([#468](https://github.com/opensearch-project/opensearch-py/pull/468)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed @@ -163,4 +164,4 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) [2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0 [2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0 [2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1 -[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2 \ No newline at end of file +[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2 diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index 3c7010ed..f14d5384 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -376,7 +376,10 @@ async def _create_aiohttp_session(self) -> Any: cookie_jar=aiohttp.DummyCookieJar(), response_class=OpenSearchClientResponse, connector=aiohttp.TCPConnector( - limit=self._limit, use_dns_cache=True, ssl=self._ssl_context + limit=self._limit, + use_dns_cache=True, + enable_cleanup_closed=True, + ssl=self._ssl_context, ), trust_env=self._trust_env, ) From e60a6ca8dfab4f12dff87b1017f38d29deee0e06 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Thu, 9 Nov 2023 20:41:37 -0500 Subject: [PATCH 44/80] Added Windows CI. 
(#569) Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/test.yml | 1 + CHANGELOG.md | 1 + opensearchpy/_async/http_aiohttp.py | 4 ++- opensearchpy/connection/base.py | 5 ++++ .../test_async/test_connection.py | 27 ++++++++++--------- .../test_async/test_transport.py | 14 +++++++--- test_opensearchpy/test_transport.py | 14 +++++++--- 7 files changed, 45 insertions(+), 21 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index bd0ac738..f79929bc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -14,6 +14,7 @@ jobs: - { os: 'ubuntu-latest', python-version: "3.10" } - { os: 'ubuntu-latest', python-version: "3.11" } - { os: 'macos-latest', python-version: "3.11" } + - { os: 'windows-latest', python-version: "3.11" } name: test (os=${{ matrix.entry.os }}, python=${{ matrix.entry.python-version }}) continue-on-error: ${{ matrix.entry.experimental || false }} diff --git a/CHANGELOG.md b/CHANGELOG.md index b8dafb82..9120d4a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added a utf-8 header to all .py files ([#557](https://github.com/opensearch-project/opensearch-py/pull/557)) - Added `samples`, `benchmarks` and `docs` to `nox -rs format` ([#556](https://github.com/opensearch-project/opensearch-py/pull/556)) - Added guide on the document lifecycle API(s) ([#559](https://github.com/opensearch-project/opensearch-py/pull/559)) +- Added Windows CI ([#569](https://github.com/opensearch-project/opensearch-py/pull/569)) ### Changed - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index f14d5384..6ed1e884 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -183,7 +183,9 @@ def __init__( ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE - ca_certs = self.default_ca_certs() if ca_certs is None else ca_certs + if ca_certs is None: + ca_certs = self.default_ca_certs() + if verify_certs: if not ca_certs: raise ImproperlyConfigured( diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index 59418bfa..54308c72 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -138,6 +138,11 @@ def __eq__(self, other: object) -> bool: raise TypeError("Unsupported equality check for %s and %s" % (self, other)) return self.__hash__() == other.__hash__() + def __lt__(self, other: object) -> bool: + if not isinstance(other, Connection): + raise TypeError("Unsupported lt check for %s and %s" % (self, other)) + return self.__hash__() < other.__hash__() + def __hash__(self) -> int: return id(self) diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 9413d0e8..7969e987 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -37,7 +37,7 @@ import aiohttp import pytest from _pytest.mark.structures import MarkDecorator -from mock import patch +from mock import MagicMock, patch from multidict import CIMultiDict from pytest import raises @@ -254,26 +254,29 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> N == str(w[0].message) ) - 
@patch("ssl.SSLContext.load_verify_locations") - async def test_uses_given_ca_certs( - self, load_verify_locations: Any, tmp_path: Any - ) -> None: + @patch("ssl.SSLContext", return_value=MagicMock()) + async def test_uses_given_ca_certs(self, ssl_context: Any, tmp_path: Any) -> None: path = tmp_path / "ca_certs.pem" path.touch() + ssl_context.return_value.load_verify_locations.return_value = None AIOHttpConnection(use_ssl=True, ca_certs=str(path)) - load_verify_locations.assert_called_once_with(cafile=str(path)) + ssl_context.return_value.load_verify_locations.assert_called_once_with( + cafile=str(path) + ) - @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_default_ca_certs(self, load_verify_locations: Any) -> None: + @patch("ssl.SSLContext", return_value=MagicMock()) + async def test_uses_default_ca_certs(self, ssl_context: Any) -> None: + ssl_context.return_value.load_verify_locations.return_value = None AIOHttpConnection(use_ssl=True) - load_verify_locations.assert_called_once_with( + ssl_context.return_value.load_verify_locations.assert_called_once_with( cafile=Connection.default_ca_certs() ) - @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_no_ca_certs(self, load_verify_locations: Any) -> None: + @patch("ssl.SSLContext", return_value=MagicMock()) + async def test_uses_no_ca_certs(self, ssl_context: Any) -> None: + ssl_context.return_value.load_verify_locations.return_value = None AIOHttpConnection(use_ssl=True, verify_certs=False) - load_verify_locations.assert_not_called() + ssl_context.return_value.load_verify_locations.assert_not_called() async def test_trust_env(self) -> None: con: Any = AIOHttpConnection(trust_env=True) diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index 4ef80707..b494f83f 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -272,7 +272,7 @@ async def test_add_connection(self) -> None: async def test_request_will_fail_after_X_retries(self) -> None: t: Any = AsyncTransport( - [{"exception": ConnectionError("abandon ship")}], + [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, ) @@ -287,7 +287,7 @@ async def test_request_will_fail_after_X_retries(self) -> None: async def test_failed_connection_will_be_marked_as_dead(self) -> None: t: Any = AsyncTransport( - [{"exception": ConnectionError("abandon ship")}] * 2, + [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2, connection_class=DummyConnection, ) @@ -381,7 +381,10 @@ async def test_sniff_reuses_connection_instances_if_possible(self) -> None: async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: t: Any = AsyncTransport( - [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], + [ + {"exception": ConnectionError(None, "abandon ship", Exception())}, + {"data": CLUSTER_NODES}, + ], connection_class=DummyConnection, sniff_on_connection_fail=True, max_retries=0, @@ -407,7 +410,10 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires( ) -> None: sniff_hosts.side_effect = [TransportError("sniff failed")] t: Any = AsyncTransport( - [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], + [ + {"exception": ConnectionError(None, "abandon ship", Exception())}, + {"data": CLUSTER_NODES}, + ], connection_class=DummyConnection, sniff_on_connection_fail=True, max_retries=3, diff --git a/test_opensearchpy/test_transport.py 
b/test_opensearchpy/test_transport.py index dc1a8f9e..4b37e3ac 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -266,7 +266,7 @@ def test_add_connection(self) -> None: def test_request_will_fail_after_X_retries(self) -> None: t: Any = Transport( - [{"exception": ConnectionError("abandon ship")}], + [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, ) @@ -275,7 +275,7 @@ def test_request_will_fail_after_X_retries(self) -> None: def test_failed_connection_will_be_marked_as_dead(self) -> None: t: Any = Transport( - [{"exception": ConnectionError("abandon ship")}] * 2, + [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2, connection_class=DummyConnection, ) @@ -349,7 +349,10 @@ def test_sniff_reuses_connection_instances_if_possible(self) -> None: def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: t: Any = Transport( - [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], + [ + {"exception": ConnectionError(None, "abandon ship", Exception())}, + {"data": CLUSTER_NODES}, + ], connection_class=DummyConnection, sniff_on_connection_fail=True, max_retries=0, @@ -366,7 +369,10 @@ def test_sniff_on_fail_failing_does_not_prevent_retires( ) -> None: sniff_hosts.side_effect = [TransportError("sniff failed")] t: Any = Transport( - [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], + [ + {"exception": ConnectionError(None, "abandon ship", Exception())}, + {"data": CLUSTER_NODES}, + ], connection_class=DummyConnection, sniff_on_connection_fail=True, max_retries=3, From 2dac852ef66788ade87987ec881c516e2e3eb866 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 10 Nov 2023 02:21:35 -0500 Subject: [PATCH 45/80] Expanded `nox -rs docs` to generate docs. (#568) Signed-off-by: dblock Signed-off-by: Daniel (dB.) Doubrovkine Signed-off-by: roma2023 --- CHANGELOG.md | 1 + DEVELOPER_GUIDE.md | 6 ++---- .../source/api-ref/clients/security_client.md | 2 +- noxfile.py | 7 +++---- opensearchpy/_async/client/__init__.py | 2 +- opensearchpy/_async/client/_patch.py | 20 +++++++++---------- opensearchpy/client/__init__.py | 2 +- opensearchpy/client/_patch.py | 20 +++++++++---------- 8 files changed, 29 insertions(+), 31 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9120d4a8..389469d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Merge `.pyi` type stubs inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) - Expanded type coverage to benchmarks, samples and tests ([#566](https://github.com/opensearch-project/opensearch-py/pull/566)) - Defaulted `enable_cleanup_closed=True` in `aiohttp.TCPConnector` to prevent TLS connection leaks ([#468](https://github.com/opensearch-project/opensearch-py/pull/468)) +- Expanded `nox -rs docs` to generate docs ([#568](https://github.com/opensearch-project/opensearch-py/pull/568)) ### Deprecated - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index f6cb568c..af281c39 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -110,12 +110,10 @@ $ nox -rs format To build the documentation with [Sphinx](https://www.sphinx-doc.org/). 
``` -pip install -e .[docs] -cd docs -make html +$ nox -rs docs ``` -Open `opensearch-py/docs/build/html/index.html` to see results. +Open `docs/build/html/index.html` to see results. ## Client Code Generator diff --git a/docs/source/api-ref/clients/security_client.md b/docs/source/api-ref/clients/security_client.md index f8995ebf..c782d38d 100644 --- a/docs/source/api-ref/clients/security_client.md +++ b/docs/source/api-ref/clients/security_client.md @@ -1,5 +1,5 @@ # Security Client ```{eval-rst} -.. autoclass:: opensearchpy.clients.security.SecurityClient +.. autoclass:: opensearchpy.client.security.SecurityClient ``` diff --git a/noxfile.py b/noxfile.py index e9189cc9..4018ed47 100644 --- a/noxfile.py +++ b/noxfile.py @@ -101,10 +101,9 @@ def lint(session: Any) -> None: @nox.session() # type: ignore def docs(session: Any) -> None: session.install(".") - session.install( - "-rdev-requirements.txt", "sphinx-rtd-theme", "sphinx-autodoc-typehints" - ) - session.run("python", "-m", "pip", "install", "sphinx-autodoc-typehints") + session.install(".[docs]") + with session.chdir("docs"): + session.run("make", "html") @nox.session() # type: ignore diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 279fda37..a4defc45 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -84,7 +84,7 @@ class AsyncOpenSearch(Client): # create connection to localhost using the ThriftConnection client = OpenSearch(connection_class=ThriftConnection) - If you want to turn on :ref:`sniffing` you have several options (described + If you want to turn on sniffing you have several options (described in :class:`~opensearchpy.Transport`):: # create connection that will automatically inspect the cluster to get diff --git a/opensearchpy/_async/client/_patch.py b/opensearchpy/_async/client/_patch.py index cbf24e0b..1b9bcb5d 100644 --- a/opensearchpy/_async/client/_patch.py +++ b/opensearchpy/_async/client/_patch.py @@ -23,8 +23,8 @@ async def list_all_point_in_time( .. warning:: - This API will be removed in a future version - Use 'get_all_pits' API instead. + This API will be removed in a future version. + Use 'get_all_pits' API instead. """ warnings.warn( @@ -60,8 +60,8 @@ async def create_point_in_time( .. warning:: - This API will be removed in a future version - Use 'create_pit' API instead. + This API will be removed in a future version. + Use 'create_pit' API instead. """ warnings.warn( @@ -89,8 +89,8 @@ async def delete_point_in_time( .. warning:: - This API will be removed in a future version - Use 'delete_all_pits' or 'delete_pit' API instead. + This API will be removed in a future version. + Use 'delete_all_pits' or 'delete_pit' API instead. """ warnings.warn( @@ -111,8 +111,8 @@ async def health_check(self: Any, params: Any = None, headers: Any = None) -> An .. warning:: - This API will be removed in a future version - Use 'health' API instead. + This API will be removed in a future version. + Use 'health' API instead. """ warnings.warn( @@ -132,8 +132,8 @@ async def update_audit_config( .. warning:: - This API will be removed in a future version - Use 'update_audit_configuration' API instead. + This API will be removed in a future version. + Use 'update_audit_configuration' API instead. 
""" warnings.warn( diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 05af6764..446226c5 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -84,7 +84,7 @@ class OpenSearch(Client): # create connection to localhost using the ThriftConnection client = OpenSearch(connection_class=ThriftConnection) - If you want to turn on :ref:`sniffing` you have several options (described + If you want to turn on sniffing you have several options (described in :class:`~opensearchpy.Transport`):: # create connection that will automatically inspect the cluster to get diff --git a/opensearchpy/client/_patch.py b/opensearchpy/client/_patch.py index 3f156906..6f5a1edb 100644 --- a/opensearchpy/client/_patch.py +++ b/opensearchpy/client/_patch.py @@ -21,8 +21,8 @@ def list_all_point_in_time(self: Any, params: Any = None, headers: Any = None) - .. warning:: - This API will be removed in a future version - Use 'get_all_pits' API instead. + This API will be removed in a future version. + Use 'get_all_pits' API instead. """ warnings.warn( @@ -58,8 +58,8 @@ def create_point_in_time( .. warning:: - This API will be removed in a future version - Use 'create_pit' API instead. + This API will be removed in a future version. + Use 'create_pit' API instead. """ warnings.warn( @@ -87,8 +87,8 @@ def delete_point_in_time( .. warning:: - This API will be removed in a future version - Use 'delete_all_pits' or 'delete_pit' API instead. + This API will be removed in a future version. + Use 'delete_all_pits' or 'delete_pit' API instead. """ warnings.warn( @@ -109,8 +109,8 @@ def health_check(self: Any, params: Any = None, headers: Any = None) -> Any: .. warning:: - This API will be removed in a future version - Use 'health' API instead. + This API will be removed in a future version. + Use 'health' API instead. """ warnings.warn( @@ -130,8 +130,8 @@ def update_audit_config( .. warning:: - This API will be removed in a future version - Use 'update_audit_configuration' API instead. + This API will be removed in a future version. + Use 'update_audit_configuration' API instead. """ warnings.warn( From 663e20dbbbac41b56cb44bd5dd588487cf8ca3a8 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 10 Nov 2023 13:20:26 -0500 Subject: [PATCH 46/80] Fix asyncio warnings/markers. (#574) * Fix asyncio warnings/markers. Signed-off-by: dblock * Allow some flexibility in codecov. 
Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- .github/codecov.yml | 5 +++++ .../test_async/test_http_connection.py | 5 ++--- .../test_async/test_plugins_client.py | 18 ++++++++++++------ 3 files changed, 19 insertions(+), 9 deletions(-) create mode 100644 .github/codecov.yml diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 00000000..e8abebc3 --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,5 @@ +coverage: + status: + project: + default: + threshold: 0.1% \ No newline at end of file diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index febb231b..94207433 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -30,15 +30,12 @@ import mock import pytest -from _pytest.mark.structures import MarkDecorator from multidict import CIMultiDict from opensearchpy._async._extra_imports import aiohttp # type: ignore from opensearchpy._async.compat import get_running_loop from opensearchpy.connection.http_async import AsyncHttpConnection -pytestmark: MarkDecorator = pytest.mark.asyncio - class TestAsyncHttpConnection: def test_auth_as_tuple(self) -> None: @@ -60,6 +57,7 @@ def auth_fn() -> None: c = AsyncHttpConnection(http_auth=auth_fn) assert callable(c._http_auth) + @pytest.mark.asyncio # type: ignore @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) async def test_basicauth_in_request_session(self, mock_request: Any) -> None: async def do_request(*args: Any, **kwargs: Any) -> Any: @@ -91,6 +89,7 @@ async def do_request(*args: Any, **kwargs: Any) -> Any: fingerprint=None, ) + @pytest.mark.asyncio # type: ignore @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) async def test_callable_in_request_session(self, mock_request: Any) -> None: def auth_fn(*args: Any, **kwargs: Any) -> Any: diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index d701892c..32a8ec3a 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -8,18 +8,24 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from unittest import TestCase + +import warnings + +import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.client import AsyncOpenSearch +pytestmark: MarkDecorator = pytest.mark.asyncio + -class TestPluginsClient(TestCase): +class TestPluginsClient: async def test_plugins_client(self) -> None: - with self.assertWarns(Warning) as w: + with warnings.catch_warnings(record=True) as w: client = AsyncOpenSearch() # testing double-init here client.plugins.__init__(client) # type: ignore - self.assertEqual( - str(w.warnings[0].message), - "Cannot load `alerting` directly to AsyncOpenSearch as it already exists. Use `AsyncOpenSearch.plugin.alerting` instead.", + assert ( + str(w[0].message) + == "Cannot load `alerting` directly to AsyncOpenSearch as it already exists. Use `AsyncOpenSearch.plugin.alerting` instead." ) From 39da24ed9362a877cb8c8ddf308e773ef88f575f Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 10 Nov 2023 13:26:10 -0500 Subject: [PATCH 47/80] Fix: assert deprecation. 
(#572) Signed-off-by: dblock Signed-off-by: roma2023 --- test_opensearchpy/test_serializer.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index d425fabf..4823a1d4 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -111,9 +111,7 @@ def test_serializes_numpy_floats(self) -> None: np.float32, np.float64, ): - self.assertRegexpMatches( - ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$' - ) + self.assertRegex(ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$') def test_serializes_numpy_datetime(self) -> None: requires_numpy_and_pandas() From 12f9db5408796c82471f4984c1f9449bba3eb358 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Sat, 11 Nov 2023 17:28:43 -0500 Subject: [PATCH 48/80] Fix: build and deploy docs. (#575) * Fix: build and deploy docs. Signed-off-by: dblock * Run nox -rs generate. Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/ci.yml | 19 ------------------- .../{build_deploy_doc.yml => docs.yml} | 19 ++++++------------- CHANGELOG.md | 1 + README.md | 2 +- docs/Makefile | 2 +- opensearchpy/_async/client/__init__.py | 2 +- opensearchpy/client/__init__.py | 2 +- setup.py | 2 +- 8 files changed, 12 insertions(+), 37 deletions(-) rename .github/workflows/{build_deploy_doc.yml => docs.yml} (55%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 392d96a8..b7767643 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -52,22 +52,3 @@ jobs: - name: Check with Twine working-directory: dist run: twine check * - - test-docs-build: - runs-on: ubuntu-latest - steps: - - name: Checkout Repository - uses: actions/checkout@v3 - - name: Set up Python 3.7 - uses: actions/setup-python@v4 - with: - python-version: 3.7 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - - name: Check if Sphinx docs are built - run: | - pip install -e .[docs] - cd docs - make html - diff --git a/.github/workflows/build_deploy_doc.yml b/.github/workflows/docs.yml similarity index 55% rename from .github/workflows/build_deploy_doc.yml rename to .github/workflows/docs.yml index f858050b..333c3902 100644 --- a/.github/workflows/build_deploy_doc.yml +++ b/.github/workflows/docs.yml @@ -1,10 +1,5 @@ -name: Build & Deploy Doc -on: - push: - branches: [ main ] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: +name: Build & Deploy Docs +on: [push, pull_request, workflow_dispatch] jobs: build-and-deploy: @@ -15,19 +10,17 @@ jobs: with: python-version: 3.7 - - name: Install + - name: Install Dependencies run: | - python -m pip install -U pip - pip install --progress-bar off -U .[docs] + python3.7 -m pip install nox - name: Make run: | - cd docs - make html - cd .. 
+ nox -rs docs - name: Deploy uses: peaceiris/actions-gh-pages@v3 + if: github.ref == 'refs/heads/main' with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./docs/build/html diff --git a/CHANGELOG.md b/CHANGELOG.md index 389469d9..778d7d3e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Removed - Removed leftover support for Python 2.7 ([#548](https://github.com/opensearch-project/opensearch-py/pull/548)) ### Fixed +- Fixed automatically built and deployed docs ([575](https://github.com/opensearch-project/opensearch-py/pull/575)) ### Security ### Dependencies - Bumps `sphinx` from <7.1 to <7.3 diff --git a/README.md b/README.md index e4524469..7a6f595a 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ OpenSearch Python Client - [License](https://github.com/opensearch-project/opensearch-py#license) - [Copyright](https://github.com/opensearch-project/opensearch-py#copyright) -## Welcome! +# Welcome! **opensearch-py** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of elasticsearch-py licensed under the [Apache v2.0 License](https://github.com/opensearch-project/opensearch-py/blob/main/LICENSE.txt). diff --git a/docs/Makefile b/docs/Makefile index d0c3cbf1..a4de0bff 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line, and also # from the environment for the first two. -SPHINXOPTS ?= +SPHINXOPTS ?= -W SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index a4defc45..9c44ddaf 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -373,7 +373,7 @@ async def index( :arg if_seq_no: only perform the operation if the last operation that has changed the document has the specified sequence number. :arg op_type: Explicit operation type. Defaults to `index` for - requests with an explicit document ID, and to `create`for requests + requests with an explicit document ID, and to `create` for requests without an explicit document ID. Valid choices are index, create. :arg pipeline: The pipeline id to preprocess incoming documents with. diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 446226c5..aac80d83 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -373,7 +373,7 @@ def index( :arg if_seq_no: only perform the operation if the last operation that has changed the document has the specified sequence number. :arg op_type: Explicit operation type. Defaults to `index` for - requests with an explicit document ID, and to `create`for requests + requests with an explicit document ID, and to `create` for requests without an explicit document ID. Valid choices are index, create. :arg pipeline: The pipeline id to preprocess incoming documents with. 
diff --git a/setup.py b/setup.py index b608990e..9e6bca97 100644 --- a/setup.py +++ b/setup.py @@ -113,7 +113,7 @@ tests_require=tests_require, extras_require={ "develop": tests_require + docs_require + generate_require, - "docs": docs_require, + "docs": docs_require + async_require, "async": async_require, "kerberos": ["requests_kerberos"], }, From e5f5e97e3c602ce9ff75eb2f5791db1839cfcebe Mon Sep 17 00:00:00 2001 From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com> Date: Sun, 12 Nov 2023 07:22:54 -0800 Subject: [PATCH 49/80] Removed EOL Python 3.6. Bumped urllib3 to 1.26.18 to patch sec vulnerability. (#576) Updated change log. Signed-off-by: Djcarrillo6 Signed-off-by: roma2023 --- CHANGELOG.md | 2 ++ setup.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 778d7d3e..47e183cf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed - Removed leftover support for Python 2.7 ([#548](https://github.com/opensearch-project/opensearch-py/pull/548)) +- Removed leftover support for Python 3.6 ([#576](https://github.com/opensearch-project/opensearch-py/pull/576)) ### Fixed - Fixed automatically built and deployed docs ([575](https://github.com/opensearch-project/opensearch-py/pull/575)) ### Security @@ -47,6 +48,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Security ### Dependencies - Bumps `urllib3` from >=1.21.1, <2 to >=1.26.9 ([#518](https://github.com/opensearch-project/opensearch-py/pull/518)) +- Bumps `urllib3` from >=1.26.17 to >=1.26.18 ([#576](https://github.com/opensearch-project/opensearch-py/pull/576)) ## [2.3.1] ### Added diff --git a/setup.py b/setup.py index 9e6bca97..4bd6cb44 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,7 @@ if package == module_dir or package.startswith(module_dir + ".") ] install_requires = [ - "urllib3>=1.26.17", + "urllib3>=1.26.18", "requests>=2.4.0, <3.0.0", "six", "python-dateutil", From 438593cd2eb26cd5d587942800df010cc156ba51 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Sun, 12 Nov 2023 11:35:06 -0500 Subject: [PATCH 50/80] Remove support for local client in testing. 
(#573) Signed-off-by: dblock Signed-off-by: roma2023 --- test_opensearchpy/test_server/__init__.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index f6856bc0..650991ca 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -32,7 +32,7 @@ from opensearchpy.helpers import test from opensearchpy.helpers.test import OpenSearchTestCase as BaseTestCase -client = None +client: Any = None def get_client(**kwargs: Any) -> Any: @@ -42,18 +42,11 @@ def get_client(**kwargs: Any) -> Any: if client is not None and not kwargs: return client - # try and locate manual override in the local environment try: - from test_opensearchpy.local import get_client as local_get_client - - new_client = local_get_client(**kwargs) - except ImportError: - # fallback to using vanilla client - try: - new_client = test.get_test_client(**kwargs) - except SkipTest: - client = False - raise + new_client = test.get_test_client(**kwargs) + except SkipTest: + client = False + raise if not kwargs: client = new_client From b1198d797311fff0fd1d78823a5b0490779fa53a Mon Sep 17 00:00:00 2001 From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com> Date: Sun, 12 Nov 2023 11:39:07 -0800 Subject: [PATCH 51/80] Fixed typo in CHANGELOG from merged PR #576 (#577) Signed-off-by: Djcarrillo6 Signed-off-by: roma2023 --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 47e183cf..6914682b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,6 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed - Removed leftover support for Python 2.7 ([#548](https://github.com/opensearch-project/opensearch-py/pull/548)) -- Removed leftover support for Python 3.6 ([#576](https://github.com/opensearch-project/opensearch-py/pull/576)) ### Fixed - Fixed automatically built and deployed docs ([575](https://github.com/opensearch-project/opensearch-py/pull/575)) ### Security From 5ad65cefb569f5f43073dcae02dfbdcaccf4a062 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Sun, 12 Nov 2023 14:42:45 -0500 Subject: [PATCH 52/80] Avoid decoding request body unless it needs to be logged. (#571) Signed-off-by: dblock Signed-off-by: Daniel (dB.) 
Doubrovkine Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/http_aiohttp.py | 6 +- opensearchpy/connection/base.py | 29 +++----- .../test_async/test_connection.py | 37 +++++++++- .../test_requests_http_connection.py | 74 ++++++++++++++----- .../test_urllib3_http_connection.py | 55 ++++++++++++-- 6 files changed, 155 insertions(+), 47 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6914682b..f0a7f99b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,6 +34,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Removed leftover support for Python 2.7 ([#548](https://github.com/opensearch-project/opensearch-py/pull/548)) ### Fixed - Fixed automatically built and deployed docs ([575](https://github.com/opensearch-project/opensearch-py/pull/575)) +- Avoid decoding request body unless it needs to be logged ([#571](https://github.com/opensearch-project/opensearch-py/pull/571)) ### Security ### Dependencies - Bumps `sphinx` from <7.1 to <7.3 diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index 6ed1e884..f301918f 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -315,7 +315,7 @@ async def perform_request( except Exception as e: self.log_request_fail( method, - str(url), + url, url_path, orig_body, self.loop.time() - start, @@ -337,7 +337,7 @@ async def perform_request( if not (200 <= response.status < 300) and response.status not in ignore: self.log_request_fail( method, - str(url), + url, url_path, orig_body, duration, @@ -351,7 +351,7 @@ async def perform_request( ) self.log_request_success( - method, str(url), url_path, orig_body, response.status, raw_data, duration + method, url, url_path, orig_body, response.status, raw_data, duration ) return response.status, response.headers, raw_data diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index 54308c72..a2774c15 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -189,6 +189,16 @@ def _pretty_json(self, data: Union[str, bytes]) -> str: # non-json data or a bulk request return data # type: ignore + def _log_request_response( + self, body: Optional[Union[str, bytes]], response: Optional[str] + ) -> None: + if logger.isEnabledFor(logging.DEBUG): + if body and isinstance(body, bytes): + body = body.decode("utf-8", "ignore") + logger.debug("> %s", body) + if response is not None: + logger.debug("< %s", response) + def _log_trace( self, method: str, @@ -246,17 +256,11 @@ def log_request_success( """Log a successful API call.""" # TODO: optionally pass in params instead of full_url and do urlencode only when needed - # body has already been serialized to utf-8, deserialize it for logging - # TODO: find a better way to avoid (de)encoding the body back and forth - if body and isinstance(body, bytes): - body = body.decode("utf-8", "ignore") - logger.info( "%s %s [status:%s request:%.3fs]", method, full_url, status_code, duration ) - logger.debug("> %s", body) - logger.debug("< %s", response) + self._log_request_response(body, response) self._log_trace(method, path, body, status_code, response, duration) def log_request_fail( @@ -283,18 +287,9 @@ def log_request_fail( exc_info=exception is not None, ) - # body has already been serialized to utf-8, deserialize it for logging - # TODO: find a better way to avoid (de)encoding the body back and forth - if body and isinstance(body, bytes): - body = body.decode("utf-8", "ignore") - - logger.debug("> %s", body) - + 
self._log_request_response(body, response) self._log_trace(method, path, body, status_code, response, duration) - if response is not None: - logger.debug("< %s", response) - def _raise_error( self, status_code: int, diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 7969e987..743add7b 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -44,7 +44,7 @@ from opensearchpy import AIOHttpConnection, AsyncOpenSearch, __versionstr__, serializer from opensearchpy.compat import reraise_exceptions from opensearchpy.connection import Connection, async_connections -from opensearchpy.exceptions import ConnectionError, TransportError +from opensearchpy.exceptions import ConnectionError, NotFoundError, TransportError from test_opensearchpy.TestHttpServer import TestHTTPServer pytestmark: MarkDecorator = pytest.mark.asyncio @@ -303,6 +303,41 @@ async def test_uncompressed_body_logged(self, logger: Any) -> None: assert '> {"example": "body"}' == req[0][0] % req[0][1:] assert "< {}" == resp[0][0] % resp[0][1:] + @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) + async def test_body_not_logged(self, logger: Any) -> None: + logger.isEnabledFor.return_value = False + + con = await self._get_mock_connection() + await con.perform_request("GET", "/", body=b'{"example": "body"}') + + assert logger.isEnabledFor.call_count == 1 + assert logger.debug.call_count == 0 + + @patch("opensearchpy.connection.base.logger") + async def test_failure_body_logged(self, logger: Any) -> None: + con = await self._get_mock_connection(response_code=404) + with pytest.raises(NotFoundError) as e: + await con.perform_request("GET", "/invalid", body=b'{"example": "body"}') + assert str(e.value) == "NotFoundError(404, '{}')" + + assert 2 == logger.debug.call_count + req, resp = logger.debug.call_args_list + + assert '> {"example": "body"}' == req[0][0] % req[0][1:] + assert "< {}" == resp[0][0] % resp[0][1:] + + @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) + async def test_failure_body_not_logged(self, logger: Any) -> None: + logger.isEnabledFor.return_value = False + + con = await self._get_mock_connection(response_code=404) + with pytest.raises(NotFoundError) as e: + await con.perform_request("GET", "/invalid") + assert str(e.value) == "NotFoundError(404, '{}')" + + assert logger.isEnabledFor.call_count == 1 + assert logger.debug.call_count == 0 + async def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index 7043ec54..bdfb97d7 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -33,7 +33,7 @@ from typing import Any import pytest -from mock import Mock, patch +from mock import MagicMock, Mock, patch from requests.auth import AuthBase from opensearchpy.connection import Connection, RequestsHttpConnection @@ -52,7 +52,7 @@ class TestRequestsHttpConnection(TestCase): def _get_mock_connection( self, connection_params: Any = {}, - status_code: int = 200, + response_code: int = 200, response_body: bytes = b"{}", ) -> Any: con = RequestsHttpConnection(**connection_params) @@ -60,7 +60,7 @@ def _get_mock_connection( def 
_dummy_send(*args: Any, **kwargs: Any) -> Any: dummy_response = Mock() dummy_response.headers = {} - dummy_response.status_code = status_code + dummy_response.status_code = response_code dummy_response.content = response_body dummy_response.request = args[0] dummy_response.cookies = {} @@ -229,20 +229,20 @@ def test_repr(self) -> None: ) def test_conflict_error_is_returned_on_409(self) -> None: - con = self._get_mock_connection(status_code=409) + con = self._get_mock_connection(response_code=409) self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") def test_not_found_error_is_returned_on_404(self) -> None: - con = self._get_mock_connection(status_code=404) + con = self._get_mock_connection(response_code=404) self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") def test_request_error_is_returned_on_400(self) -> None: - con = self._get_mock_connection(status_code=400) + con = self._get_mock_connection(response_code=400) self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") @patch("opensearchpy.connection.base.logger") def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: - con = self._get_mock_connection(status_code=404) + con = self._get_mock_connection(response_code=404) self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") self.assertEqual(0, logger.warning.call_count) @@ -250,7 +250,7 @@ def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: @patch("opensearchpy.connection.base.logger") def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: con = self._get_mock_connection( - response_body=b'{"answer": 42}', status_code=500 + response_body=b'{"answer": 42}', response_code=500 ) self.assertRaises( TransportError, @@ -326,7 +326,7 @@ def test_uncompressed_body_logged(self, logger: Any) -> None: con = self._get_mock_connection( connection_params={"http_compress": True}, - status_code=500, + response_code=500, response_body=b'{"hello":"world"}', ) with pytest.raises(TransportError): @@ -337,6 +337,41 @@ def test_uncompressed_body_logged(self, logger: Any) -> None: self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) + @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) + def test_body_not_logged(self, logger: Any) -> None: + logger.isEnabledFor.return_value = False + + con = self._get_mock_connection() + con.perform_request("GET", "/", body=b'{"example": "body"}') + + self.assertEqual(logger.isEnabledFor.call_count, 1) + self.assertEqual(logger.debug.call_count, 0) + + @patch("opensearchpy.connection.base.logger") + def test_failure_body_logged(self, logger: Any) -> None: + con = self._get_mock_connection(response_code=404) + with pytest.raises(NotFoundError) as e: + con.perform_request("GET", "/invalid", body=b'{"example": "body"}') + self.assertEqual(str(e.value), "NotFoundError(404, '{}')") + + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + + self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) + self.assertEqual("< {}", resp[0][0] % resp[0][1:]) + + @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) + def test_failure_body_not_logged(self, logger: Any) -> None: + logger.isEnabledFor.return_value = False + + con = self._get_mock_connection(response_code=404) + with pytest.raises(NotFoundError) as e: + con.perform_request("GET", "/invalid") + self.assertEqual(str(e.value), 
"NotFoundError(404, '{}')") + + self.assertEqual(logger.isEnabledFor.call_count, 1) + self.assertEqual(logger.debug.call_count, 0) + def test_defaults(self) -> None: con = self._get_mock_connection() request = self._get_request(con, "GET", "/") @@ -403,7 +438,7 @@ def send_raise(*_: Any, **__: Any) -> Any: with pytest.raises(RecursionError) as e: conn.perform_request("GET", "/") - assert str(e.value) == "Wasn't modified!" + self.assertEqual(str(e.value), "Wasn't modified!") def mock_session(self) -> Any: access_key = uuid.uuid4().hex @@ -472,7 +507,7 @@ def test_aws_signer_signs_with_query_string(self, mock_sign: Any) -> None: ) -class TestRequestsConnectionRedirect: +class TestRequestsConnectionRedirect(TestCase): server1: TestHTTPServer server2: TestHTTPServer @@ -495,20 +530,23 @@ def test_redirect_failure_when_allow_redirect_false(self) -> None: conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) with pytest.raises(TransportError) as e: conn.perform_request("GET", "/redirect", allow_redirects=False) - assert e.value.status_code == 302 + self.assertEqual(e.value.status_code, 302) # allow_redirects = True (Default) def test_redirect_success_when_allow_redirect_true(self) -> None: conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) user_agent = conn._get_default_user_agent() status, headers, data = conn.perform_request("GET", "/redirect") - assert status == 200 + self.assertEqual(status, 200) data = json.loads(data) - assert data["headers"] == { - "Host": "localhost:8090", - "Accept-Encoding": "identity", - "User-Agent": user_agent, - } + self.assertEqual( + data["headers"], + { + "Host": "localhost:8090", + "Accept-Encoding": "identity", + "User-Agent": user_agent, + }, + ) class TestSignerWithFrozenCredentials(TestRequestsHttpConnection): diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index 9720283b..e22e943f 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -36,25 +36,29 @@ import pytest import urllib3 -from mock import Mock, patch +from mock import MagicMock, Mock, patch from urllib3._collections import HTTPHeaderDict from opensearchpy import __versionstr__ from opensearchpy.connection import Connection, Urllib3HttpConnection +from opensearchpy.exceptions import NotFoundError from ..test_cases import SkipTest, TestCase class TestUrllib3HttpConnection(TestCase): def _get_mock_connection( - self, connection_params: Any = {}, response_body: bytes = b"{}" + self, + connection_params: Any = {}, + response_body: bytes = b"{}", + response_code: int = 200, ) -> Any: con = Urllib3HttpConnection(**connection_params) def _dummy_urlopen(*args: Any, **kwargs: Any) -> Any: dummy_response = Mock() dummy_response.headers = HTTPHeaderDict({}) - dummy_response.status = 200 + dummy_response.status = response_code dummy_response.data = response_body _dummy_urlopen.call_args = (args, kwargs) # type: ignore return dummy_response @@ -219,11 +223,11 @@ def test_aws_signer_when_region_is_null(self) -> None: with pytest.raises(ValueError) as e: Urllib3AWSV4SignerAuth(session, None) - assert str(e.value) == "Region cannot be empty" + self.assertEqual(str(e.value), "Region cannot be empty") with pytest.raises(ValueError) as e: Urllib3AWSV4SignerAuth(session, "") - assert str(e.value) == "Region cannot be empty" + self.assertEqual(str(e.value), "Region 
cannot be empty") def test_aws_signer_when_credentials_is_null(self) -> None: region = "us-west-1" @@ -232,11 +236,11 @@ def test_aws_signer_when_credentials_is_null(self) -> None: with pytest.raises(ValueError) as e: Urllib3AWSV4SignerAuth(None, region) - assert str(e.value) == "Credentials cannot be empty" + self.assertEqual(str(e.value), "Credentials cannot be empty") with pytest.raises(ValueError) as e: Urllib3AWSV4SignerAuth("", region) - assert str(e.value) == "Credentials cannot be empty" + self.assertEqual(str(e.value), "Credentials cannot be empty") def test_aws_signer_when_service_is_specified(self) -> None: region = "us-west-1" @@ -339,6 +343,41 @@ def test_uncompressed_body_logged(self, logger: Any) -> None: self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) self.assertEqual("< {}", resp[0][0] % resp[0][1:]) + @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) + def test_body_not_logged(self, logger: Any) -> None: + logger.isEnabledFor.return_value = False + + con = self._get_mock_connection() + con.perform_request("GET", "/", body=b'{"example": "body"}') + + self.assertEqual(logger.isEnabledFor.call_count, 1) + self.assertEqual(logger.debug.call_count, 0) + + @patch("opensearchpy.connection.base.logger") + def test_failure_body_logged(self, logger: Any) -> None: + con = self._get_mock_connection(response_code=404) + with pytest.raises(NotFoundError) as e: + con.perform_request("GET", "/invalid", body=b'{"example": "body"}') + self.assertEqual(str(e.value), "NotFoundError(404, '{}')") + + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + + self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) + self.assertEqual("< {}", resp[0][0] % resp[0][1:]) + + @patch("opensearchpy.connection.base.logger", return_value=MagicMock()) + def test_failure_body_not_logged(self, logger: Any) -> None: + logger.isEnabledFor.return_value = False + + con = self._get_mock_connection(response_code=404) + with pytest.raises(NotFoundError) as e: + con.perform_request("GET", "/invalid") + self.assertEqual(str(e.value), "NotFoundError(404, '{}')") + + self.assertEqual(logger.isEnabledFor.call_count, 1) + self.assertEqual(logger.debug.call_count, 0) + def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) @@ -355,7 +394,7 @@ def urlopen_raise(*_: Any, **__: Any) -> Any: with pytest.raises(RecursionError) as e: conn.perform_request("GET", "/") - assert str(e.value) == "Wasn't modified!" + self.assertEqual(str(e.value), "Wasn't modified!") class TestSignerWithFrozenCredentials(TestUrllib3HttpConnection): From 7af7b5ffbe676b63f7a3a0be3fc595544b6c91a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 08:56:06 -0500 Subject: [PATCH 53/80] Update coverage requirement from <7.0.0 to <8.0.0 (#578) * Update coverage requirement from <7.0.0 to <8.0.0 Updates the requirements on [coverage](https://github.com/nedbat/coveragepy) to permit the latest version. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/coverage-3.0b1...7.3.2) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] Signed-off-by: roma2023 --- CHANGELOG.md | 3 ++- setup.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0a7f99b..5139acfd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Security ### Dependencies - Bumps `sphinx` from <7.1 to <7.3 +- Bumps `coverage` from <7.0.0 to <8.0.0 ## [2.3.2] ### Added @@ -169,4 +170,4 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) [2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0 [2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0 [2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1 -[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2 +[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2 \ No newline at end of file diff --git a/setup.py b/setup.py index 4bd6cb44..dc613280 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ ] tests_require = [ "requests>=2.0.0, <3.0.0", - "coverage<7.0.0", + "coverage<8.0.0", "mock", "pyyaml", "pytest>=3.0.0", From 6aaa52c1d589cb337ce129687d6680876c5ccaad Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Mon, 13 Nov 2023 15:52:13 -0500 Subject: [PATCH 54/80] Added client-level REST helpers. (#544) * Added client-level REST helpers. Signed-off-by: dblock * Move functions into an .http namespace. Signed-off-by: dblock * Poetry update in samples. Signed-off-by: dblock * Fix: typo. Signed-off-by: dblock * Clarified what to use in which older versions. 
Signed-off-by: dblock

---------

Signed-off-by: dblock
Signed-off-by: roma2023
---
 CHANGELOG.md                                |   1 +
 docs/source/api-ref/client.md               |   1 +
 docs/source/api-ref/clients/http_client.md  |   5 +
 guides/json.md                              |  46 +-
 noxfile.py                                  |   2 +-
 opensearchpy/_async/client/__init__.py      |   3 +-
 opensearchpy/_async/client/http.py          | 129 ++++
 opensearchpy/client/__init__.py             |   3 +-
 opensearchpy/client/http.py                 | 129 ++++
 samples/json/json-hello-async.py            |  24 +-
 samples/json/json-hello.py                  |  16 +-
 samples/poetry.lock                         | 691 +++++++++++++++++---
 test_opensearchpy/test_async/test_client.py |  94 +++
 test_opensearchpy/test_async/test_http.py   |  54 ++
 test_opensearchpy/test_cases.py             |   8 +-
 test_opensearchpy/test_client/test_http.py  |  55 ++
 16 files changed, 1102 insertions(+), 159 deletions(-)
 create mode 100644 docs/source/api-ref/clients/http_client.md
 create mode 100644 opensearchpy/_async/client/http.py
 create mode 100644 opensearchpy/client/http.py
 create mode 100644 test_opensearchpy/test_async/test_client.py
 create mode 100644 test_opensearchpy/test_async/test_http.py
 create mode 100644 test_opensearchpy/test_client/test_http.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5139acfd..7bb6a9e1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,6 +16,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 - Added `samples`, `benchmarks` and `docs` to `nox -rs format` ([#556](https://github.com/opensearch-project/opensearch-py/pull/556))
 - Added guide on the document lifecycle API(s) ([#559](https://github.com/opensearch-project/opensearch-py/pull/559))
 - Added Windows CI ([#569](https://github.com/opensearch-project/opensearch-py/pull/569))
+- Added `client.http` JSON REST request API helpers ([#544](https://github.com/opensearch-project/opensearch-py/pull/544))
 ### Changed
 - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508))
 - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513))
diff --git a/docs/source/api-ref/client.md b/docs/source/api-ref/client.md
index 3a2d2c2b..669deec8 100644
--- a/docs/source/api-ref/client.md
+++ b/docs/source/api-ref/client.md
@@ -21,6 +21,7 @@ titlesonly:
 maxdepth: 1
 ---
 
+clients/http_client
 clients/cat_client
 clients/cluster_client
 clients/dangling_indices_client
diff --git a/docs/source/api-ref/clients/http_client.md b/docs/source/api-ref/clients/http_client.md
new file mode 100644
index 00000000..7e361d5d
--- /dev/null
+++ b/docs/source/api-ref/clients/http_client.md
@@ -0,0 +1,5 @@
+# Http Client
+
+```{eval-rst}
+.. autoclass:: opensearchpy.client.http.HttpClient
+```
diff --git a/guides/json.md b/guides/json.md
index edefa209..832f2a6e 100644
--- a/guides/json.md
+++ b/guides/json.md
@@ -6,55 +6,69 @@
 # Making Raw JSON REST Requests
 
-The OpenSearch client implements many high-level REST DSLs that invoke OpenSearch APIs. However you may find yourself in a situation that requires you to invoke an API that is not supported by the client. Use `client.transport.perform_request` to do so. See [samples/json](../samples/json) for a complete working sample.
+The OpenSearch client implements many high-level REST DSLs that invoke OpenSearch APIs. However, you may find yourself in a situation that requires you to invoke an API that is not supported by the client. Use `client.http.get`, `head`, `put`, `post`, and `delete` to do so. See [samples/json](../samples/json) for a complete working sample.
 
 ## GET
 
 The following example returns the server version information via `GET /`.
 
 ```python
-info = client.transport.perform_request('GET', '/')
+info = client.http.get("/")
 print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!")
 ```
 
 Note that the client will parse the response as JSON when appropriate.
 
+These methods are also available in the asynchronous client.
+
+```python
+info = await client.http.get("/")
+print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!")
+```
+
+Use `perform_request` in older versions (<= 2.3.x), and `client.http.get` and others in newer ones.
+
+```python
+info = client.transport.perform_request("GET", "/")
+print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!")
+```
+
 ## PUT
 
 The following example creates an index.
 
 ```python
 index_body = {
-  'settings': {
-    'index': {
-      'number_of_shards': 4
+  "settings": {
+    "index": {
+      "number_of_shards": 4
     }
   }
 }
 
-client.transport.perform_request("PUT", "/movies", body=index_body)
+client.http.put("/movies", body=index_body)
 ```
 
-Note that the client will raise errors automatically. For example, if the index already exists, an `opensearchpy.exceptions.RequestError: RequestError(400, 'resource_already_exists_exception',` will be thrown.
+Note that the client will raise errors automatically. For example, if the index already exists, an `opensearchpy.exceptions.RequestError: RequestError(400, "resource_already_exists_exception",` will be thrown.
 
 ## POST
 
 The following example searches for a document.
 
 ```python
-q = 'miller'
+q = "miller"
 
 query = {
-  'size': 5,
-  'query': {
-    'multi_match': {
-      'query': q,
-      'fields': ['title^2', 'director']
+  "size": 5,
+  "query": {
+    "multi_match": {
+      "query": q,
+      "fields": ["title^2", "director"]
     }
   }
 }
 
-client.transport.perform_request("POST", "/movies/_search", body = query)
+client.http.post("/movies/_search", body = query)
 ```
 
 ## DELETE
 
 The following example deletes an index.
 
```python -client.transport.perform_request("DELETE", "/movies") +client.http.delete("/movies") ``` diff --git a/noxfile.py b/noxfile.py index 4018ed47..296ea8a4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -50,7 +50,7 @@ def test(session: Any) -> None: session.run("python", "setup.py", "test") -@nox.session() # type: ignore +@nox.session(python=["3.7"]) # type: ignore def format(session: Any) -> None: session.install("black", "isort") diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 9c44ddaf..fa8b5f04 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -47,6 +47,7 @@ from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient +from .http import HttpClient from .indices import IndicesClient from .ingest import IngestClient from .nodes import NodesClient @@ -229,8 +230,8 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.remote_store = RemoteStoreClient(self) self.features = FeaturesClient(self) - self.plugins = PluginsClient(self) + self.http = HttpClient(self) def __repr__(self) -> Any: try: diff --git a/opensearchpy/_async/client/http.py b/opensearchpy/_async/client/http.py new file mode 100644 index 00000000..89278c6b --- /dev/null +++ b/opensearchpy/_async/client/http.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Mapping, Optional + +from .client import Client +from .utils import NamespacedClient + + +class HttpClient(NamespacedClient): + def __init__(self, client: Client) -> None: + super(HttpClient, self).__init__(client) + + async def get( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a GET request and return the data. + + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return await self.transport.perform_request( + "GET", url=url, headers=headers, params=params, body=body + ) + + async def head( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a HEAD request and return the data. 
+ + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return await self.transport.perform_request( + "HEAD", url=url, headers=headers, params=params, body=body + ) + + async def post( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a POST request and return the data. + + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return await self.transport.perform_request( + "POST", url=url, headers=headers, params=params, body=body + ) + + async def delete( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a DELETE request and return the data. + + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return await self.transport.perform_request( + "DELETE", url=url, headers=headers, params=params, body=body + ) + + async def put( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a PUT request and return the data. 
+ + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return await self.transport.perform_request( + "PUT", url=url, headers=headers, params=params, body=body + ) diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index aac80d83..a9c71552 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -47,6 +47,7 @@ from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient +from .http import HttpClient from .indices import IndicesClient from .ingest import IngestClient from .nodes import NodesClient @@ -229,8 +230,8 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.remote_store = RemoteStoreClient(self) self.features = FeaturesClient(self) - self.plugins = PluginsClient(self) + self.http = HttpClient(self) def __repr__(self) -> Any: try: diff --git a/opensearchpy/client/http.py b/opensearchpy/client/http.py new file mode 100644 index 00000000..d709bf47 --- /dev/null +++ b/opensearchpy/client/http.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Mapping, Optional + +from .client import Client +from .utils import NamespacedClient + + +class HttpClient(NamespacedClient): + def __init__(self, client: Client) -> None: + super(HttpClient, self).__init__(client) + + def get( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a GET request and return the data. + + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return self.transport.perform_request( + "GET", url=url, headers=headers, params=params, body=body + ) + + def head( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a HEAD request and return the data. 
+ + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return self.transport.perform_request( + "HEAD", url=url, headers=headers, params=params, body=body + ) + + def post( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a POST request and return the data. + + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return self.transport.perform_request( + "POST", url=url, headers=headers, params=params, body=body + ) + + def delete( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a DELETE request and return the data. + + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return self.transport.perform_request( + "DELETE", url=url, headers=headers, params=params, body=body + ) + + def put( + self, + url: str, + headers: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + ) -> Any: + """ + Perform a PUT request and return the data. + + :arg url: absolute url (without host) to target + :arg headers: dictionary of headers, will be handed over to the + underlying :class:`~opensearchpy.Connection` class + :arg params: dictionary of query parameters, will be handed over to the + underlying :class:`~opensearchpy.Connection` class for serialization + :arg body: body of the request, will be serialized using serializer and + passed to the connection + """ + return self.transport.perform_request( + "PUT", url=url, headers=headers, params=params, body=body + ) diff --git a/samples/json/json-hello-async.py b/samples/json/json-hello-async.py index fbadece6..34a4ca8c 100755 --- a/samples/json/json-hello-async.py +++ b/samples/json/json-hello-async.py @@ -31,7 +31,7 @@ async def main() -> None: ) try: - info = await client.transport.perform_request("GET", "/") + info = await client.http.get("/") print( f"Welcome to {info['version']['distribution']} {info['version']['number']}!" 
) @@ -42,11 +42,7 @@ async def main() -> None: index_body = {"settings": {"index": {"number_of_shards": 4}}} - print( - await client.transport.perform_request( - "PUT", f"/{index_name}", body=index_body - ) - ) + print(await client.http.put(f"/{index_name}", body=index_body)) # add a document to the index @@ -55,8 +51,8 @@ async def main() -> None: id = "1" print( - await client.transport.perform_request( - "PUT", f"/{index_name}/_doc/{id}?refresh=true", body=document + await client.http.put( + f"/{index_name}/_doc/{id}?refresh=true", body=document ) ) @@ -69,21 +65,15 @@ async def main() -> None: "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } - print( - await client.transport.perform_request( - "POST", f"/{index_name}/_search", body=query - ) - ) + print(await client.http.post(f"/{index_name}/_search", body=query)) # delete the document - print( - await client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}") - ) + print(await client.http.delete(f"/{index_name}/_doc/{id}")) # delete the index - print(await client.transport.perform_request("DELETE", f"/{index_name}")) + print(await client.http.delete(f"/{index_name}")) finally: await client.close() diff --git a/samples/json/json-hello.py b/samples/json/json-hello.py index 5df36f5f..5b39e41b 100755 --- a/samples/json/json-hello.py +++ b/samples/json/json-hello.py @@ -27,7 +27,7 @@ ssl_show_warn=False, ) -info = client.transport.perform_request("GET", "/") +info = client.http.get("/") print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") # create an index @@ -36,7 +36,7 @@ index_body = {"settings": {"index": {"number_of_shards": 4}}} -print(client.transport.perform_request("PUT", f"/{index_name}", body=index_body)) +print(client.http.put(f"/{index_name}", body=index_body)) # add a document to the index @@ -44,11 +44,7 @@ id = "1" -print( - client.transport.perform_request( - "PUT", f"/{index_name}/_doc/{id}?refresh=true", body=document - ) -) +print(client.http.put(f"/{index_name}/_doc/{id}?refresh=true", body=document)) # search for a document @@ -59,12 +55,12 @@ "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } -print(client.transport.perform_request("POST", f"/{index_name}/_search", body=query)) +print(client.http.post(f"/{index_name}/_search", body=query)) # delete the document -print(client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}")) +print(client.http.delete(f"/{index_name}/_doc/{id}")) # delete the index -print(client.transport.perform_request("DELETE", f"/{index_name}")) +print(client.http.delete(f"/{index_name}")) diff --git a/samples/poetry.lock b/samples/poetry.lock index 55fb558d..2d733513 100644 --- a/samples/poetry.lock +++ b/samples/poetry.lock @@ -1,18 +1,188 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. 
+ +[[package]] +name = "aiohttp" +version = "3.8.6" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = 
"aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" 
+files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + +[[package]] +name = "asynctest" +version = "0.13.0" +description = "Enhance the standard unittest package with features for testing asyncio libraries" +optional = false +python-versions = ">=3.5" +files = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "boto3" -version = "1.28.67" +version = "1.28.83" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.67-py3-none-any.whl", hash = "sha256:7d17f987a8b4f804e5ae509a30589736a72c6db7b0e2fb1338997128fdc9a3ec"}, - {file = "boto3-1.28.67.tar.gz", hash = "sha256:8db91c0648c9dcde1cf7fb4c15cd50da1fdef573595a9b9c769a303c7531b9a6"}, + {file = "boto3-1.28.83-py3-none-any.whl", hash = "sha256:1d10691911c4b8b9443d3060257ba32b68b6e3cad0eebbb9f69fd1c52a78417f"}, + {file = "boto3-1.28.83.tar.gz", hash = "sha256:489c4967805b677b7a4030460e4c06c0903d6bc0f6834453611bf87efbd8d8a3"}, ] [package.dependencies] -botocore = ">=1.31.67,<1.32.0" +botocore = ">=1.31.83,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.7.0,<0.8.0" @@ -21,13 +191,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.67" +version = "1.31.83" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.67-py3-none-any.whl", hash = "sha256:487fb6ee4a6612613da370599b1a1aca0e159dd9e94b2e8aaa8e6ad9cc546ded"}, - {file = "botocore-1.31.67.tar.gz", hash = "sha256:ab3b73a2e03efa1c534a94f8db4a5cf45629a53e5478d2d154b0a3e2ffb05249"}, + {file = "botocore-1.31.83-py3-none-any.whl", hash = "sha256:c742069e8bfd06d212d712228258ff09fb481b6ec02358e539381ce0fcad065a"}, + {file = "botocore-1.31.83.tar.gz", hash = "sha256:40914b0fb28f13d709e1f8a4481e278350b77a3987be81acd23715ec8d5fedca"}, ] [package.dependencies] @@ -54,101 +224,184 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", 
hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = 
"charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + 
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = 
"sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = 
"frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, ] [[package]] @@ -162,6 +415,26 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "importlib-metadata" +version = "6.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + [[package]] name = "jmespath" version = "1.0.1" @@ -173,29 +446,115 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + 
{file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + [[package]] name = "opensearch-py" -version = "2.3.2" +version = "0.0.0" description = "Python client for OpenSearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -files = [ - {file = "opensearch-py-2.3.2.tar.gz", hash = "sha256:96e470b55107fd5bfd873722dc9808c333360eacfa174341f5cc2d021aa30448"}, - {file = "opensearch_py-2.3.2-py2.py3-none-any.whl", hash = "sha256:b1d6607380c8f19d90c142470939d051f0bac96069ce0ac25970b3c39c431f8b"}, -] +files = [] +develop = true [package.dependencies] +aiohttp = {version = ">=3,<4", optional = true, markers = "extra == \"async\""} certifi = ">=2022.12.07" python-dateutil = "*" requests = ">=2.4.0,<3.0.0" six = "*" -urllib3 = ">=1.26.9" +urllib3 = ">=1.26.17" [package.extras] async = ["aiohttp (>=3,<4)"] -develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst-parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", 
"sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] -docs = ["myst-parser", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] -kerberos = ["requests-kerberos"] +develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +kerberos = ["requests_kerberos"] + +[package.source] +type = "directory" +url = ".." [[package]] name = "python-dateutil" @@ -260,6 +619,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + [[package]] name = "urllib3" version = "1.26.18" @@ -293,7 +663,110 @@ secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17. socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "1309989011bed3cb46e36fc451b65f040ef9fe9cecbe3f3706be240d4ea6d52e" +content-hash = "b5b82026a5a053bd7c1cf6f8db578509faded763d01d1ed5aee201ba62c17a0a" diff --git a/test_opensearchpy/test_async/test_client.py b/test_opensearchpy/test_async/test_client.py new file mode 100644 index 00000000..cb74a187 --- /dev/null +++ b/test_opensearchpy/test_async/test_client.py @@ 
-0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + +from collections import defaultdict +from typing import Any, Collection, Mapping, Optional, Union + +import pytest + +from opensearchpy import AsyncOpenSearch +from opensearchpy._async.transport import AsyncTransport + +pytestmark = pytest.mark.asyncio + + +class DummyTransport(AsyncTransport): + def __init__(self, hosts: Any, responses: Any = None, **kwargs: Any) -> None: + self.hosts = hosts + self.responses = responses + self.call_count = 0 + self.calls: Any = defaultdict(list) + + async def perform_request( + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + resp: Any = (200, {}) + if self.responses: + resp = self.responses[self.call_count] + self.call_count += 1 + self.calls[(method, url)].append((params, headers, body)) + return resp + + +class OpenSearchTestCaseWithDummyTransport: + def assert_call_count_equals(self, count: int) -> None: + assert isinstance(self.client.transport, DummyTransport) + assert count == self.client.transport.call_count + + def assert_url_called(self, method: str, url: str, count: int = 1) -> Any: + assert isinstance(self.client.transport, DummyTransport) + assert (method, url) in self.client.transport.calls + calls = self.client.transport.calls[(method, url)] + assert count == len(calls) + return calls + + def setup_method(self, method: Any) -> None: + self.client = AsyncOpenSearch(transport_class=DummyTransport) + + +class TestClient(OpenSearchTestCaseWithDummyTransport): + async def test_our_transport_used(self) -> None: + assert isinstance(self.client.transport, DummyTransport) + + async def test_start_with_0_call(self) -> None: + self.assert_call_count_equals(0) + + async def test_each_call_is_recorded(self) -> None: + await self.client.transport.perform_request("GET", "/") + await self.client.transport.perform_request( + "DELETE", "/42", params={}, body="body" + ) + self.assert_call_count_equals(2) + assert [({}, None, "body")] == self.assert_url_called("DELETE", "/42", 1) diff --git a/test_opensearchpy/test_async/test_http.py b/test_opensearchpy/test_async/test_http.py new file mode 100644 index 00000000..510fd165 --- /dev/null +++ b/test_opensearchpy/test_async/test_http.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: 
Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import pytest +from _pytest.mark.structures import MarkDecorator + +from .test_client import OpenSearchTestCaseWithDummyTransport + +pytestmark: MarkDecorator = pytest.mark.asyncio + + +class TestHttpClient(OpenSearchTestCaseWithDummyTransport): + async def test_head(self) -> None: + await self.client.http.head("/") + self.assert_call_count_equals(1) + assert [(None, None, None)] == self.assert_url_called("HEAD", "/", 1) + + async def test_get(self) -> None: + await self.client.http.get("/") + self.assert_call_count_equals(1) + assert [(None, None, None)] == self.assert_url_called("GET", "/", 1) + + async def test_put(self) -> None: + await self.client.http.put(url="/xyz", params={"X": "Y"}, body="body") + self.assert_call_count_equals(1) + assert [({"X": "Y"}, None, "body")] == self.assert_url_called("PUT", "/xyz", 1) + + async def test_post(self) -> None: + await self.client.http.post(url="/xyz", params={"X": "Y"}, body="body") + self.assert_call_count_equals(1) + assert [({"X": "Y"}, None, "body")] == self.assert_url_called("POST", "/xyz", 1) + + async def test_post_with_headers(self) -> None: + await self.client.http.post( + url="/xyz", headers={"A": "B"}, params={"X": "Y"}, body="body" + ) + self.assert_call_count_equals(1) + assert [({"X": "Y"}, {"A": "B"}, "body")] == self.assert_url_called( + "POST", "/xyz", 1 + ) + + async def test_delete(self) -> None: + await self.client.http.delete(url="/xyz", params={"X": "Y"}, body="body") + self.assert_call_count_equals(1) + assert [({"X": "Y"}, None, "body")] == self.assert_url_called( + "DELETE", "/xyz", 1 + ) diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index e36d9bb6..29bf9394 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -27,7 +27,7 @@ from collections import defaultdict -from typing import Any, Sequence +from typing import Any, Mapping, Optional, Sequence from unittest import SkipTest, TestCase from opensearchpy import OpenSearch @@ -46,9 +46,9 @@ def perform_request( self, method: str, url: str, - params: Any = None, - headers: Any = None, - body: Any = None, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + headers: Optional[Mapping[str, str]] = None, ) -> Any: resp: Any = (200, {}) if self.responses: diff --git a/test_opensearchpy/test_client/test_http.py b/test_opensearchpy/test_client/test_http.py new file mode 100644 index 00000000..7cd168f7 --- /dev/null +++ b/test_opensearchpy/test_client/test_http.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+# + +from test_opensearchpy.test_cases import OpenSearchTestCase + + +class TestHttp(OpenSearchTestCase): + def test_http_get(self) -> None: + self.client.http.get("/") + self.assert_call_count_equals(1) + self.assertEqual([(None, None, None)], self.assert_url_called("GET", "/", 1)) + + def test_http_head(self) -> None: + self.client.http.head("/") + self.assert_call_count_equals(1) + self.assertEqual([(None, None, None)], self.assert_url_called("HEAD", "/", 1)) + + def test_http_put(self) -> None: + self.client.http.put("/xyz", headers={"X": "Y"}, body="body") + self.assert_call_count_equals(1) + self.assertEqual( + [(None, {"X": "Y"}, "body")], self.assert_url_called("PUT", "/xyz", 1) + ) + + def test_http_post(self) -> None: + self.client.http.post("/xyz", headers={"X": "Y"}, body="body") + self.assert_call_count_equals(1) + self.assertEqual( + [(None, {"X": "Y"}, "body")], self.assert_url_called("POST", "/xyz", 1) + ) + + def test_http_post_with_params(self) -> None: + self.client.http.post( + "/xyz", headers={"X": "Y"}, params={"A": "B"}, body="body" + ) + self.assert_call_count_equals(1) + self.assertEqual( + [({"A": "B"}, {"X": "Y"}, "body")], + self.assert_url_called("POST", "/xyz", 1), + ) + + def test_http_delete(self) -> None: + self.client.http.delete("/xyz", headers={"X": "Y"}, body="body") + self.assert_call_count_equals(1) + self.assertEqual( + [(None, {"X": "Y"}, "body")], self.assert_url_called("DELETE", "/xyz", 1) + ) From ebe0b4b81a5744c13e0640dc3fd906379f7e283a Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Tue, 14 Nov 2023 14:15:54 -0500 Subject: [PATCH 55/80] Preparing for release, 2.4.0 (#581) * Preparing for release, 2.4.0. Signed-off-by: dblock * Added OpenSearch 2.11. Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/integration.yml | 2 +- .github/workflows/unified-release.yml | 2 +- CHANGELOG.md | 4 ++-- COMPATIBILITY.md | 7 +------ benchmarks/poetry.lock | 2 +- opensearchpy/_version.py | 2 +- 6 files changed, 7 insertions(+), 12 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 106e940a..0ca6c823 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0' ] + opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0', '2.11.0' ] secured: [ "true", "false" ] steps: diff --git a/.github/workflows/unified-release.yml b/.github/workflows/unified-release.yml index cddea14a..c2e707db 100644 --- a/.github/workflows/unified-release.yml +++ b/.github/workflows/unified-release.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - stack_version: ['2.3.2'] + stack_version: ['2.4.0'] steps: - name: Checkout diff --git a/CHANGELOG.md b/CHANGELOG.md index 7bb6a9e1..5b8ba707 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,7 @@ # CHANGELOG Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) -## [Unreleased] +## [2.4.0] ### Added - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) - Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and 
update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) @@ -171,4 +171,4 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) [2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0 [2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0 [2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1 -[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2 \ No newline at end of file +[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2 diff --git a/COMPATIBILITY.md b/COMPATIBILITY.md index 0634f6cc..39dba02b 100644 --- a/COMPATIBILITY.md +++ b/COMPATIBILITY.md @@ -9,12 +9,7 @@ The below matrix shows the compatibility of the [`opensearch-py`](https://pypi.o | --- | --- | --- | | 1.0.0 | 1.0.0-1.2.4 | | | 1.1.0 | 1.3.0-1.3.7 | | -| 2.0.x | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.1.x | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.2.0 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.3.0 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.3.1 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | -| 2.3.2 | 1.0.0-2.10.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used | +| 2.x.x | 1.0.0-2.11.0 | client works against OpenSearch 1.x as long as features removed in 2.0 are not used | ## Upgrading diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock index d4992d68..a0178934 100644 --- a/benchmarks/poetry.lock +++ b/benchmarks/poetry.lock @@ -515,7 +515,7 @@ files = [ [[package]] name = "opensearch-py" -version = "2.3.2" +version = "2.4.0" description = "Python client for OpenSearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index 13c8d5c9..8883d395 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -25,4 +25,4 @@ # specific language governing permissions and limitations # under the License. -__versionstr__: str = "2.3.2" +__versionstr__: str = "2.4.0" From 75f0e5ffd158fff04262b0b8c848c081cc1ca508 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Tue, 14 Nov 2023 22:53:02 -0500 Subject: [PATCH 56/80] Remove 2.11. (#583) Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/integration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 0ca6c823..106e940a 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0', '2.11.0' ] + opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0' ] secured: [ "true", "false" ] steps: From 866a8b764978ce09eb981182d97ecb3b17de6bb9 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) 
Doubrovkine" Date: Wed, 15 Nov 2023 12:31:32 -0500 Subject: [PATCH 57/80] Add 2.11.0 to the test matrix. (#587) Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/integration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 106e940a..0ca6c823 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0' ] + opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0', '2.11.0' ] secured: [ "true", "false" ] steps: From 0531c8d9b4cb891a7557641ef532766bf2f5191b Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Wed, 15 Nov 2023 15:03:06 -0500 Subject: [PATCH 58/80] Fix: version number in filename for distribution. (#588) Signed-off-by: dblock Signed-off-by: roma2023 --- setup.py | 5 ++++- utils/build-dists.py | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index dc613280..6a0a5d13 100644 --- a/setup.py +++ b/setup.py @@ -36,9 +36,12 @@ base_dir = abspath(dirname(__file__)) with open(join(base_dir, package_name.replace("-", ""), "_version.py")) as f: - m = re.search(r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read()) + data = f.read() + m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) if m: package_version = m.group(1) + else: + raise Exception(f"Invalid version: {data}") with open(join(base_dir, "README.md")) as f: long_description = f.read().strip() diff --git a/utils/build-dists.py b/utils/build-dists.py index 569ed7ea..bca9c154 100644 --- a/utils/build-dists.py +++ b/utils/build-dists.py @@ -194,7 +194,7 @@ def main() -> None: if m: version = m.group(1) else: - raise Exception(f"Invalid version {data}") + raise Exception(f"Invalid version: {data}") major_version = version.split(".")[0] @@ -258,8 +258,8 @@ def main() -> None: with open(version_path) as f: version_data = f.read() version_data = re.sub( - r"__versionstr__ = \"[^\"]+\"", - '__versionstr__ = "%s"' % version, + r"__versionstr__: str = \"[^\"]+\"", + '__versionstr__: str = "%s"' % version, version_data, ) with open(version_path, "w") as f: From 6fc07df1884b71617661683e3b79d2a78b9ca721 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Wed, 15 Nov 2023 19:33:43 -0500 Subject: [PATCH 59/80] Fix for 592 (#594) * Prepare for next developer iteration, 2.4.1. Signed-off-by: dblock * Fix: sync opensearchpy without iohttp. Signed-off-by: dblock * Use nox to run tests. 
Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/test.yml | 4 +-- .github/workflows/unified-release.yml | 2 +- CHANGELOG.md | 9 +++++++ RELEASING.md | 2 +- benchmarks/poetry.lock | 2 +- noxfile.py | 8 ++++++ opensearchpy/__init__.py | 36 ++++++++++++++------------- opensearchpy/_version.py | 2 +- opensearchpy/connection/__init__.py | 11 ++++++-- 9 files changed, 51 insertions(+), 25 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f79929bc..40013d62 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -31,10 +31,10 @@ jobs: PIP_DISABLE_PIP_VERSION_CHECK: 1 - name: Install Dependencies run: | - python -m pip install -r dev-requirements.txt + python -m pip install nox - name: Run Tests run: | - python setup.py test + python -m nox -rs test-${{ matrix.entry.python-version }} - name: Upload coverage to Codecov uses: codecov/codecov-action@v2 with: diff --git a/.github/workflows/unified-release.yml b/.github/workflows/unified-release.yml index c2e707db..d33bdd98 100644 --- a/.github/workflows/unified-release.yml +++ b/.github/workflows/unified-release.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - stack_version: ['2.4.0'] + stack_version: ['2.4.1'] steps: - name: Checkout diff --git a/CHANGELOG.md b/CHANGELOG.md index 5b8ba707..2db5fe0d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +## [2.4.1] +### Added +### Changed +### Deprecated +### Removed +### Fixed +- Fix dependency on `aiohttp` ([#594](https://github.com/opensearch-project/opensearch-py/pull/594)) +### Security + ## [2.4.0] ### Added - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) diff --git a/RELEASING.md b/RELEASING.md index 324c3d55..fdf80f16 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -37,4 +37,4 @@ The release process is standard across repositories in this org and is run by a 1. The [release-drafter.yml](.github/workflows/release-drafter.yml) will be automatically kicked off and a draft release will be created. 1. This draft release triggers the [jenkins release workflow](https://build.ci.opensearch.org/job/opensearch-py-release/) as a result of which opensearch-py client is released on [PyPi](https://pypi.org/project/opensearch-py/). 1. Once the above release workflow is successful, the drafted release on GitHub is published automatically. -1. Increment "version" in [_version.py](./opensearchpy/_version.py) to the next patch release, e.g. v2.1.1. See [example](https://github.com/opensearch-project/opensearch-py/pull/167). \ No newline at end of file +1. Add an "Unreleased" section to CHANGELOG, and increment version to the next patch release, e.g. v2.1.1. See [example](https://github.com/opensearch-project/opensearch-py/pull/593). 
\ No newline at end of file diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock index a0178934..145d183d 100644 --- a/benchmarks/poetry.lock +++ b/benchmarks/poetry.lock @@ -515,7 +515,7 @@ files = [ [[package]] name = "opensearch-py" -version = "2.4.0" +version = "2.4.1" description = "Python client for OpenSearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" diff --git a/noxfile.py b/noxfile.py index 296ea8a4..510d30c7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -45,6 +45,14 @@ @nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) # type: ignore def test(session: Any) -> None: session.install(".") + # ensure client can be imported without aiohttp + session.run("python", "-c", "import opensearchpy\nprint(opensearchpy.OpenSearch())") + # ensure client can be imported with aiohttp + session.install(".[async]") + session.run( + "python", "-c", "import opensearchpy\nprint(opensearchpy.AsyncOpenSearch())" + ) + session.install("-r", "dev-requirements.txt") session.run("python", "setup.py", "test") diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index e9ef6485..aa528c98 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -44,12 +44,8 @@ logger = logging.getLogger("opensearch") logger.addHandler(logging.NullHandler()) -from ._async.client import AsyncOpenSearch -from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection -from ._async.transport import AsyncTransport from .client import OpenSearch from .connection import ( - AsyncHttpConnection, Connection, RequestsHttpConnection, Urllib3HttpConnection, @@ -76,12 +72,7 @@ UnknownDslObject, ValidationException, ) -from .helpers import ( - AWSV4SignerAsyncAuth, - AWSV4SignerAuth, - RequestsAWSV4SignerAuth, - Urllib3AWSV4SignerAuth, -) +from .helpers import AWSV4SignerAuth, RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth from .helpers.aggs import A from .helpers.analysis import analyzer, char_filter, normalizer, token_filter, tokenizer from .helpers.document import Document, InnerDoc, MetaField @@ -159,7 +150,6 @@ "JSONSerializer", "Connection", "RequestsHttpConnection", - "AsyncHttpConnection", "Urllib3HttpConnection", "ImproperlyConfigured", "OpenSearchException", @@ -178,7 +168,6 @@ "AWSV4SignerAuth", "Urllib3AWSV4SignerAuth", "RequestsAWSV4SignerAuth", - "AWSV4SignerAsyncAuth", "A", "AttrDict", "AttrList", @@ -251,10 +240,23 @@ "normalizer", "token_filter", "tokenizer", - "AIOHttpConnection", - "AsyncConnection", - "AsyncTransport", - "AsyncOpenSearch", - "AsyncHttpConnection", "__versionstr__", ] + +try: + from ._async.client import AsyncOpenSearch + from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection + from ._async.transport import AsyncTransport + from .connection import AsyncHttpConnection + from .helpers import AWSV4SignerAsyncAuth + + __all__ += [ + "AIOHttpConnection", + "AsyncConnection", + "AsyncTransport", + "AsyncOpenSearch", + "AsyncHttpConnection", + "AWSV4SignerAsyncAuth", + ] +except (ImportError, SyntaxError): + pass diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index 8883d395..371c642d 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -25,4 +25,4 @@ # specific language governing permissions and limitations # under the License. 
-__versionstr__: str = "2.4.0" +__versionstr__: str = "2.4.1" diff --git a/opensearchpy/connection/__init__.py b/opensearchpy/connection/__init__.py index 40037859..287b7ecb 100644 --- a/opensearchpy/connection/__init__.py +++ b/opensearchpy/connection/__init__.py @@ -27,7 +27,6 @@ from .base import Connection -from .http_async import AsyncHttpConnection from .http_requests import RequestsHttpConnection from .http_urllib3 import Urllib3HttpConnection, create_ssl_context @@ -36,5 +35,13 @@ "RequestsHttpConnection", "Urllib3HttpConnection", "create_ssl_context", - "AsyncHttpConnection", ] + +try: + from .http_async import AsyncHttpConnection + + __all__ += [ + "AsyncHttpConnection", + ] +except (ImportError, SyntaxError): + pass From ad036ac91423e4e3af59dea87a278bed488d1cca Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Wed, 15 Nov 2023 22:53:40 -0500 Subject: [PATCH 60/80] Preparing for next development iteration, 2.4.2. (#597) Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/unified-release.yml | 2 +- CHANGELOG.md | 8 ++++++++ benchmarks/poetry.lock | 2 +- opensearchpy/_version.py | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/unified-release.yml b/.github/workflows/unified-release.yml index d33bdd98..83df042d 100644 --- a/.github/workflows/unified-release.yml +++ b/.github/workflows/unified-release.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - stack_version: ['2.4.1'] + stack_version: ['2.4.2'] steps: - name: Checkout diff --git a/CHANGELOG.md b/CHANGELOG.md index 2db5fe0d..bea060e1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,14 @@ # CHANGELOG Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +## [Unreleased] +### Added +### Changed +### Deprecated +### Removed +### Fixed +### Security + ## [2.4.1] ### Added ### Changed diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock index 145d183d..2db2eddd 100644 --- a/benchmarks/poetry.lock +++ b/benchmarks/poetry.lock @@ -515,7 +515,7 @@ files = [ [[package]] name = "opensearch-py" -version = "2.4.1" +version = "2.4.2" description = "Python client for OpenSearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index 371c642d..7943a07b 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -25,4 +25,4 @@ # specific language governing permissions and limitations # under the License. -__versionstr__: str = "2.4.1" +__versionstr__: str = "2.4.2" From a1ed1abbdea5cf05aa602f27aa5c700a08c88167 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 17 Nov 2023 13:18:42 -0500 Subject: [PATCH 61/80] Fix: TypeError on calling parallel_bulk. (#601) * Fix: TypeError on calling parallel_bulk. Signed-off-by: dblock * Added a sample that uses a bulk function generator. 
Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + guides/bulk.md | 58 +++++++++++++++++++ opensearchpy/helpers/actions.py | 11 +++- samples/bulk/bulk-helpers.py | 52 ++++++++++++++++- .../test_helpers/test_actions.py | 53 ++++++++++++++++- 5 files changed, 172 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bea060e1..794df51c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Deprecated ### Removed ### Fixed +- Fix `TypeError` on `parallel_bulk` ([#601](https://github.com/opensearch-project/opensearch-py/pull/601)) ### Security ## [2.4.1] diff --git a/guides/bulk.md b/guides/bulk.md index ef6b8c5d..251be4f8 100644 --- a/guides/bulk.md +++ b/guides/bulk.md @@ -1,6 +1,8 @@ - [Bulk Indexing](#bulk-indexing) - [Line-Delimited JSON](#line-delimited-json) - [Bulk Helper](#bulk-helper) + - [Parallel Bulk](#parallel-bulk) + - [Data Generator](#data-generator) # Bulk Indexing @@ -46,6 +48,8 @@ data = [ response = client.bulk(data) if response["errors"]: print(f"There were errors!") + for item in response["items"]: + print(f"{item['index']['status']}: {item['index']['error']['type']}") else: print(f"Bulk-inserted {len(response['items'])} items.") ``` @@ -69,3 +73,57 @@ response = helpers.bulk(client, docs, max_retries=3) print(response) ``` +## Parallel Bulk + +Bulk helpers also support `parallel_bulk`, which accepts options such as the chunk size and flags that suppress raising errors and exceptions. + +```python +succeeded = [] +failed = [] +for success, item in helpers.parallel_bulk(client, + actions=data, + chunk_size=10, + raise_on_error=False, + raise_on_exception=False, + max_chunk_bytes=20 * 1024 * 1024, + request_timeout=60): + + if success: + succeeded.append(item) + else: + failed.append(item) + +if len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + +if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items.") +``` + +## Data Generator + +Use a data generator function with bulk helpers instead of building arrays.
+ +```python +def _generate_data(): + for i in range(100): + yield {"_index": index_name, "_id": i, "value": i} + +succeeded = [] +failed = [] +for success, item in helpers.parallel_bulk(client, actions=_generate_data()): + if success: + succeeded.append(item) + else: + failed.append(item) + +if len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + +if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items (parallel_bulk).") +``` \ No newline at end of file diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index 7f8ced35..960d5a1c 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -442,6 +442,8 @@ def parallel_bulk( max_chunk_bytes: int = 100 * 1024 * 1024, queue_size: int = 4, expand_action_callback: Any = expand_action, + raise_on_exception: bool = True, + raise_on_error: bool = True, ignore_status: Any = (), *args: Any, **kwargs: Any @@ -485,7 +487,14 @@ def _setup_queues(self) -> None: for result in pool.imap( lambda bulk_chunk: list( _process_bulk_chunk( - client, bulk_chunk[1], bulk_chunk[0], ignore_status, *args, **kwargs + client, + bulk_chunk[1], + bulk_chunk[0], + raise_on_exception, + raise_on_error, + ignore_status, + *args, + **kwargs ) ), _chunk_actions( diff --git a/samples/bulk/bulk-helpers.py b/samples/bulk/bulk-helpers.py index 3dc165c8..678b2c09 100755 --- a/samples/bulk/bulk-helpers.py +++ b/samples/bulk/bulk-helpers.py @@ -12,6 +12,7 @@ import os +from typing import Any from opensearchpy import OpenSearch, helpers @@ -49,8 +50,57 @@ for i in range(100): data.append({"_index": index_name, "_id": i, "value": i}) +# serialized bulk raising an exception on error rc = helpers.bulk(client, data) -print(f"Bulk-inserted {rc[0]} items.") +print(f"Bulk-inserted {rc[0]} items (bulk).") + +# parallel bulk with explicit error checking +succeeded = [] +failed = [] +for success, item in helpers.parallel_bulk( + client, + actions=data, + chunk_size=10, + raise_on_error=False, + raise_on_exception=False, + max_chunk_bytes=20 * 1024 * 1024, + request_timeout=60, +): + if success: + succeeded.append(item) + else: + failed.append(item) + +if len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + +if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items (parallel_bulk).") + + +# streaming bulk with a data generator +def _generate_data() -> Any: + for i in range(100): + yield {"_index": index_name, "_id": i, "value": i} + + +succeeded = [] +failed = [] +for success, item in helpers.streaming_bulk(client, actions=_generate_data()): + if success: + succeeded.append(item) + else: + failed.append(item) + +if len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + +if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items (streaming_bulk).") # delete index client.indices.delete(index=index_name) diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index 739e8647..e44dbc98 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -67,11 +67,62 @@ def test_all_chunks_sent(self, _process_bulk_chunk: Any) -> None: self.assertEqual(50, mock_process_bulk_chunk.call_count) # type: ignore + @mock.patch("opensearchpy.OpenSearch.bulk") + def test_with_all_options(self, _bulk: Any) -> None: + actions = ({"x": 
i} for i in range(100)) + list( + helpers.parallel_bulk( + OpenSearch(), + actions=actions, + chunk_size=2, + raise_on_error=False, + raise_on_exception=False, + max_chunk_bytes=20 * 1024 * 1024, + request_timeout=160, + ignore_status=(123), + ) + ) + + self.assertEqual(50, _bulk.call_count) + _bulk.assert_called_with( + '{"index":{}}\n{"x":98}\n{"index":{}}\n{"x":99}\n', request_timeout=160 + ) + + @mock.patch("opensearchpy.helpers.actions._process_bulk_chunk") + def test_process_bulk_chunk_with_all_options( + self, _process_bulk_chunk: Any + ) -> None: + actions = ({"x": i} for i in range(100)) + client = OpenSearch() + list( + helpers.parallel_bulk( + client, + actions=actions, + chunk_size=2, + raise_on_error=True, + raise_on_exception=True, + max_chunk_bytes=20 * 1024 * 1024, + request_timeout=160, + ignore_status=(123), + ) + ) + + self.assertEqual(50, _process_bulk_chunk.call_count) + _process_bulk_chunk.assert_called_with( + client, + ['{"index":{}}', '{"x":98}', '{"index":{}}', '{"x":99}'], + [({"index": {}}, {"x": 98}), ({"index": {}}, {"x": 99})], + True, + True, + 123, + request_timeout=160, + ) + @pytest.mark.skip # type: ignore @mock.patch( "opensearchpy.helpers.actions._process_bulk_chunk", # make sure we spend some time in the thread - side_effect=lambda *a: [ + side_effect=lambda *args, **kwargs: [ (True, time.sleep(0.001) or threading.current_thread().ident) # type: ignore ], ) From 7b4d93b49cc6ce3f157c2569b81b8325d62ccbf6 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 17 Nov 2023 16:09:19 -0500 Subject: [PATCH 62/80] Fix Amazon OpenSearch Serverless integration with LangChain. (#603) Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/helpers/signer.py | 2 ++ .../test_connection/test_requests_http_connection.py | 2 ++ .../test_connection/test_urllib3_http_connection.py | 2 ++ 4 files changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 794df51c..12aa24ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Removed ### Fixed - Fix `TypeError` on `parallel_bulk` ([#601](https://github.com/opensearch-project/opensearch-py/pull/601)) +- Fix Amazon OpenSearch Serverless integration with LangChain ([#603](https://github.com/opensearch-project/opensearch-py/pull/603)) ### Security ## [2.4.1] diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py index 930b8d25..43b5ee3c 100644 --- a/opensearchpy/helpers/signer.py +++ b/opensearchpy/helpers/signer.py @@ -78,6 +78,7 @@ class RequestsAWSV4SignerAuth(requests.auth.AuthBase): def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore self.signer = AWSV4Signer(credentials, region, service) + self.service = service # tools like LangChain rely on this, see https://github.com/opensearch-project/opensearch-py/issues/600 def __call__(self, request): # type: ignore return self._sign_request(request) # type: ignore @@ -133,6 +134,7 @@ class AWSV4SignerAuth(RequestsAWSV4SignerAuth): class Urllib3AWSV4SignerAuth(Callable): # type: ignore def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore self.signer = AWSV4Signer(credentials, region, service) + self.service = service # tools like LangChain rely on this, see https://github.com/opensearch-project/opensearch-py/issues/600 def __call__(self, method: str, url: str, body: Any) -> Dict[str, str]: return self.signer.sign(method, url, body) diff --git 
a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index bdfb97d7..62adf39f 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -460,6 +460,7 @@ def test_aws_signer_as_http_auth(self) -> None: from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth auth = RequestsAWSV4SignerAuth(self.mock_session(), region) + self.assertEqual(auth.service, "es") con = RequestsHttpConnection(http_auth=auth) prepared_request = requests.Request("GET", "http://localhost").prepare() auth(prepared_request) @@ -478,6 +479,7 @@ def test_aws_signer_when_service_is_specified(self) -> None: from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth auth = RequestsAWSV4SignerAuth(self.mock_session(), region, service) + self.assertEqual(auth.service, service) con = RequestsHttpConnection(http_auth=auth) prepared_request = requests.Request("GET", "http://localhost").prepare() auth(prepared_request) diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index e22e943f..971a3254 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -192,6 +192,7 @@ def test_aws_signer_as_http_auth_adds_headers(self, mock_open: Any) -> None: from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") + self.assertEqual(auth.service, "es") con = Urllib3HttpConnection(http_auth=auth, headers={"x": "y"}) con.perform_request("GET", "/") self.assertEqual(mock_open.call_count, 1) @@ -249,6 +250,7 @@ def test_aws_signer_when_service_is_specified(self) -> None: from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth auth = Urllib3AWSV4SignerAuth(self.mock_session(), region, service) + self.assertEqual(auth.service, service) headers = auth("GET", "http://localhost", None) self.assertIn("Authorization", headers) self.assertIn("X-Amz-Date", headers) From f956d9136073f1b8d456ca1c33abd5a47e993f6f Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 17 Nov 2023 16:54:54 -0500 Subject: [PATCH 63/80] Fixes a suspicious setattr that should be taking an Any (#604) * Fix invalid value type. 
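A minimal sketch of the pattern the new `test_save_double` test exercises;
the document class below is illustrative and not part of this patch:

```python
from typing import Union

from opensearchpy import Document
from opensearchpy.helpers import field


class MyDocumentWithDouble(Document):
    # The Union annotation is the same workaround the test applies for
    # mypy's "expression has type float, variable has type Double".
    a_double: Union[float, field.Double] = field.Double()


doc = MyDocumentWithDouble()
# DslBase.__setattr__ now takes value: Any instead of Optional[bool],
# so a non-boolean assignment like this float type-checks cleanly.
doc.a_double = 3.14159265359
assert doc.a_double == 3.14159265359
```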
Signed-off-by: dblock

* Workaround Incompatible types in assignment (expression has type float, variable has type Double) [assignment]

Signed-off-by: dblock

---------

Signed-off-by: dblock
Signed-off-by: roma2023
---
 CHANGELOG.md                              |  1 +
 opensearchpy/helpers/utils.py             |  2 +-
 .../test_helpers/test_document.py         | 27 ++++++++++++++++++-
 3 files changed, 28 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 12aa24ff..ddf456d7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 ### Fixed
 - Fix `TypeError` on `parallel_bulk` ([#601](https://github.com/opensearch-project/opensearch-py/pull/601))
 - Fix Amazon OpenSearch Serverless integration with LangChain ([#603](https://github.com/opensearch-project/opensearch-py/pull/603))
+- Fix type of `Field.__setattr__` ([#604](https://github.com/opensearch-project/opensearch-py/pull/604))
 ### Security
 
 ## [2.4.1]
diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py
index 2a9f19da..a27ec9a4 100644
--- a/opensearchpy/helpers/utils.py
+++ b/opensearchpy/helpers/utils.py
@@ -309,7 +309,7 @@ def __eq__(self, other: Any) -> bool:
     def __ne__(self, other: Any) -> bool:
         return not self == other
 
-    def __setattr__(self, name: str, value: Optional[bool]) -> None:
+    def __setattr__(self, name: str, value: Any) -> None:
         if name.startswith("_"):
             return super(DslBase, self).__setattr__(name, value)
         return self._setattr(name, value)
diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py
index e1b5e5c4..1a156ad8 100644
--- a/test_opensearchpy/test_helpers/test_document.py
+++ b/test_opensearchpy/test_helpers/test_document.py
@@ -32,7 +32,7 @@
 import pickle
 from datetime import datetime
 from hashlib import sha256
-from typing import Any
+from typing import Any, Union
 
 from pytest import raises
 
@@ -648,3 +648,28 @@ class MySubDocWithNested(MyDoc):
         },
         "title": {"type": "keyword"},
     }
+
+
+def test_save_double(mock_client: Any) -> None:
+    class MyDocumentWithDouble(MyDoc):
+        a_double: Union[float, field.Double] = field.Double()
+
+        def save(
+            self,
+            using: Any = None,
+            index: Any = None,
+            validate: bool = True,
+            skip_empty: bool = True,
+            return_doc_meta: bool = False,
+            **kwargs: Any,
+        ) -> Any:
+            if not self.a_double:
+                self.a_double = 3.14159265359
+            return super().save(
+                using, index, validate, skip_empty, return_doc_meta, **kwargs
+            )
+
+    md: Any = MyDocumentWithDouble()
+    with raises(ValidationException):
+        md.save(using="mock")
+    assert md.a_double == 3.14159265359

From 8e94496d978cf4c35e78ddaf1283054703476a17 Mon Sep 17 00:00:00 2001
From: "Daniel (dB.) Doubrovkine"
Date: Sun, 19 Nov 2023 18:18:43 -0500
Subject: [PATCH 64/80] Preparing for next developer iteration, 2.4.3. (#606)

Signed-off-by: dblock
Signed-off-by: roma2023
---
 .github/workflows/unified-release.yml |  2 +-
 CHANGELOG.md                          | 25 ++++++++++++++++++-------
 benchmarks/poetry.lock                |  2 +-
 opensearchpy/_version.py              |  2 +-
 4 files changed, 21 insertions(+), 10 deletions(-)

diff --git a/.github/workflows/unified-release.yml b/.github/workflows/unified-release.yml
index 83df042d..8a812578 100644
--- a/.github/workflows/unified-release.yml
+++ b/.github/workflows/unified-release.yml
@@ -9,7 +9,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        stack_version: ['2.4.2']
+        stack_version: ['2.4.3']
 
     steps:
       - name: Checkout
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ddf456d7..8c487e3a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,14 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 ### Deprecated
 ### Removed
 ### Fixed
+### Security
+
+## [2.4.2]
+### Added
+### Changed
+### Deprecated
+### Removed
+### Fixed
 - Fix `TypeError` on `parallel_bulk` ([#601](https://github.com/opensearch-project/opensearch-py/pull/601))
 - Fix Amazon OpenSearch Serverless integration with LangChain ([#603](https://github.com/opensearch-project/opensearch-py/pull/603))
 - Fix type of `Field.__setattr__` ([#604](https://github.com/opensearch-project/opensearch-py/pull/604))
 ### Security
 
 ## [2.4.1]
@@ -184,11 +192,14 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 - Fixed Wrong return type hint in `async_scan` ([520](https://github.com/opensearch-project/opensearch-py/pull/520))
 ### Security
 
-[Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.2...HEAD
-[2.0.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.0...v2.0.1
-[2.1.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.1...v2.1.0
-[2.1.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.0...v2.1.1
-[2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0
-[2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0
-[2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1
+[Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.4.2...HEAD
+[2.4.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.4.0...v2.4.2
+[2.4.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.4.0...v2.4.1
+[2.4.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.2...v2.4.0
 [2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2
+[2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1
+[2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0
+[2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0
+[2.1.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.0...v2.1.1
+[2.1.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.1...v2.1.0
+[2.0.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.0...v2.0.1
diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock
index 2db2eddd..a598d001 100644
--- a/benchmarks/poetry.lock
+++ b/benchmarks/poetry.lock
@@ -515,7 +515,7 @@ files = [
 
 [[package]]
 name = "opensearch-py"
-version = "2.4.2"
+version = "2.4.3"
 description = "Python client for OpenSearch"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4"
diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py
index 7943a07b..204e14a3 100644
--- a/opensearchpy/_version.py
+++ b/opensearchpy/_version.py
@@ -25,4 +25,4 @@
 # specific language governing permissions and limitations
 # under the License.
 
-__versionstr__: str = "2.4.2"
+__versionstr__: str = "2.4.3"

From 7fc5a01886361df188cb9062f08bc956cf0be815 Mon Sep 17 00:00:00 2001
From: "Daniel (dB.) Doubrovkine"
Date: Tue, 21 Nov 2023 13:04:39 -0500
Subject: [PATCH 65/80] Added pylint, enforce naming. (#590)

* Added pylint.

Signed-off-by: dblock

* Enforce pylint:invalid-name.

Signed-off-by: dblock

* Updated the generated code header to prevent broken links.

Signed-off-by: dblock

* Swapped order of messages.

Signed-off-by: dblock

---------

Signed-off-by: dblock
Signed-off-by: roma2023
---
 .ci/make.sh                                    |   2 +-
 CHANGELOG.md                                   |   1 +
 DEVELOPER_GUIDE.md                             |  10 +-
 benchmarks/bench_async.py                      |  33 +-
 benchmarks/bench_info_sync.py                  |  45 +--
 benchmarks/bench_sync.py                       |  51 +--
 benchmarks/poetry.lock                         | 324 +++++++++---------
 benchmarks/pyproject.toml                      |   2 +-
 guides/async.md                                |   2 +-
 guides/bulk.md                                 |   6 +-
 guides/plugins/knn.md                          |  14 +-
 noxfile.py                                     |  17 +-
 opensearchpy/_async/client/__init__.py         |  13 +-
 opensearchpy/_async/client/cat.py              |  13 +-
 opensearchpy/_async/client/cluster.py          |  13 +-
 opensearchpy/_async/client/dangling_indices.py |  13 +-
 opensearchpy/_async/client/indices.py          |  13 +-
 opensearchpy/_async/client/ingest.py           |  13 +-
 opensearchpy/_async/client/nodes.py            |  13 +-
 opensearchpy/_async/client/remote_store.py     |  13 +-
 opensearchpy/_async/client/security.py         |  13 +-
 opensearchpy/_async/client/snapshot.py         |  13 +-
 opensearchpy/_async/client/tasks.py            |  13 +-
 opensearchpy/_async/client/utils.py            |   4 +-
 opensearchpy/_async/helpers/index.py           |   2 +-
 opensearchpy/client/__init__.py                |  13 +-
 opensearchpy/client/cat.py                     |  13 +-
 opensearchpy/client/cluster.py                 |  13 +-
 opensearchpy/client/dangling_indices.py        |  13 +-
 opensearchpy/client/indices.py                 |  13 +-
 opensearchpy/client/ingest.py                  |  13 +-
 opensearchpy/client/nodes.py                   |  13 +-
 opensearchpy/client/remote_store.py            |  13 +-
 opensearchpy/client/security.py                |  13 +-
 opensearchpy/client/snapshot.py                |  13 +-
 opensearchpy/client/tasks.py                   |  13 +-
 opensearchpy/compat.py                         |   2 +-
 opensearchpy/connection/base.py                |   4 +-
 opensearchpy/helpers/aggs.py                   |   4 +-
 opensearchpy/helpers/function.py               |   2 +-
 opensearchpy/helpers/index.py                  |   4 +-
 opensearchpy/helpers/query.py                  |   4 +-
 .../advanced_index_actions_sample.py           |   4 +-
 samples/aws/README.md                          |   2 +-
 samples/aws/search-requests.py                 |  69 ----
 samples/aws/search-urllib3.py                  |  69 ----
 samples/aws/search_requests.py                 |  75 ++++
 samples/aws/search_urllib3.py                  |  75 ++++
 samples/bulk/bulk-array.py                     |  63 ----
 samples/bulk/bulk-helpers.py                   | 106 ------
 samples/bulk/bulk-ld.py                        |  63 ----
 samples/bulk/bulk_array.py                     |  69 ++++
 samples/bulk/bulk_helpers.py                   | 110 ++++++
 samples/bulk/bulk_ld.py                        |  69 ++++
 .../document_lifecycle_sample.py               | 161 +++++----
 samples/hello/hello.py                         |  78 +++--
 samples/hello/{hello-async.py => hello_async.py} |  0
 .../index_template/index_template_sample.py    | 214 ++++++------
 samples/json/json-hello.py                     |  66 ----
 samples/json/json_hello.py                     |  72 ++++
 ...son-hello-async.py => json_hello_async.py}  |   0
 samples/knn/knn-basics.py                      |  82 -----
 samples/knn/knn-boolean-filter.py              |  92 -----
 samples/knn/knn-efficient-filter.py            | 180 ----------
 ...nn-async-basics.py => knn_async_basics.py}  |   0
 samples/knn/knn_basics.py                      |  88 +++++
 samples/knn/knn_boolean_filter.py              |  98 ++++++
 samples/knn/knn_efficient_filter.py            | 186 ++++++++++
 samples/security/roles.py                      |  86 ++---
 samples/security/users.py                      |  46 +--
 setup.cfg                                      |  15 +-
 setup.py                                       |  20 +-
 .../test_async/test_connection.py              |   2 +-
 .../test_async/test_helpers/test_document.py   |  30 +-
 .../test_async/test_server/__init__.py         |   4 +-
 .../test_security_plugin.py                    |   4 +-
 .../test_async/test_transport.py               |   6 +-
 .../test_requests_http_connection.py           |   2 +-
 test_opensearchpy/test_helpers/test_aggs.py    |  12 +-
 .../test_helpers/test_document.py              |  30 +-
 test_opensearchpy/test_helpers/test_query.py   |  20 +-
 test_opensearchpy/test_helpers/test_utils.py   |   2 +-
 ...{TestHttpServer.py => test_http_server.py}  |  14 +-
 .../test_server/test_rest_api_spec.py          | 129 +++----
 test_opensearchpy/test_transport.py            |   6 +-
 utils/{build-dists.py => build_dists.py}       |  48 +--
 utils/{generate-api.py => generate_api.py}     |  57 +--
 utils/generated_file_headers.txt               |  13 +-
 ...{license-headers.py => license_headers.py}  |  12 +-
 89 files changed, 1794 insertions(+), 1674 deletions(-)
 delete mode 100644 samples/aws/search-requests.py
 delete mode 100644 samples/aws/search-urllib3.py
 create mode 100644 samples/aws/search_requests.py
 create mode 100644 samples/aws/search_urllib3.py
 delete mode 100755 samples/bulk/bulk-array.py
 delete mode 100755 samples/bulk/bulk-helpers.py
 delete mode 100755 samples/bulk/bulk-ld.py
 create mode 100755 samples/bulk/bulk_array.py
 create mode 100755 samples/bulk/bulk_helpers.py
 create mode 100755 samples/bulk/bulk_ld.py
 rename samples/hello/{hello-async.py => hello_async.py} (100%)
 delete mode 100755 samples/json/json-hello.py
 create mode 100755 samples/json/json_hello.py
 rename samples/json/{json-hello-async.py => json_hello_async.py} (100%)
 delete mode 100755 samples/knn/knn-basics.py
 delete mode 100755 samples/knn/knn-boolean-filter.py
 delete mode 100755 samples/knn/knn-efficient-filter.py
 rename samples/knn/{knn-async-basics.py => knn_async_basics.py} (100%)
 create mode 100755 samples/knn/knn_basics.py
 create mode 100755 samples/knn/knn_boolean_filter.py
 create mode 100755 samples/knn/knn_efficient_filter.py
 rename test_opensearchpy/{TestHttpServer.py => test_http_server.py} (82%)
 rename utils/{build-dists.py => build_dists.py} (89%)
 rename utils/{generate-api.py => generate_api.py} (93%)
 rename utils/{license-headers.py => license_headers.py} (90%)

diff --git a/.ci/make.sh b/.ci/make.sh
index c1ab9fba..648f4c72 100755
--- a/.ci/make.sh
+++ b/.ci/make.sh
@@ -131,7 +131,7 @@ if [[ "$CMD" == "assemble" ]]; then
   docker run \
     --rm -v $repo/.ci/output:/code/opensearch-py/dist \
     $product \
-    /bin/bash -c "python /code/opensearch-py/utils/build-dists.py $VERSION"
+    /bin/bash -c "python /code/opensearch-py/utils/build_dists.py $VERSION"
 
   # Verify that there are dists in .ci/output
   if compgen -G ".ci/output/*" > /dev/null; then
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8c487e3a..891157b9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 
 ## [Unreleased]
 ### Added
+- Added pylint, enforcing `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590))
 ### Changed
 ### Deprecated
 ### Removed
 ### Fixed
diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index af281c39..ee788e49 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -99,7 +99,15 @@ Note that integration tests require docker to be installed and running, and down
 
 ## Linter
 
-Run the linter and test suite to ensure your changes do not break existing code. The following will auto-format your changes.
+This library uses a combination of [pylint](https://github.com/pylint-dev/pylint), [black](https://github.com/psf/black), and [isort](https://github.com/PyCQA/isort) to enforce some consistency in code formatting or naming conventions.
+
+Run the linters to ensure your changes do not break existing conventions.
+
+```
+$ nox -rs lint
+```
+
+Use a formatter to auto-correct some common problems.
 
 ```
 $ nox -rs format
diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py
index baeb7d80..02c0a238 100644
--- a/benchmarks/bench_async.py
+++ b/benchmarks/bench_async.py
@@ -16,14 +16,8 @@
 
 from opensearchpy import AsyncHttpConnection, AsyncOpenSearch
 
-host = "localhost"
-port = 9200
-auth = ("admin", "admin")
-index_name = "test-index-async"
-item_count = 100
-
 
-async def index_records(client: Any, item_count: int) -> None:
+async def index_records(client: Any, index_name: str, item_count: int) -> None:
     await asyncio.gather(
         *[
             client.index(
@@ -41,6 +35,11 @@ async def index_records(client: Any, item_count: int) -> None:
 
 
 async def test_async(client_count: int = 1, item_count: int = 1) -> None:
+    host = "localhost"
+    port = 9200
+    auth = ("admin", "admin")
+    index_name = "test-index-async"
+
     clients = []
     for i in range(client_count):
         clients.append(
@@ -61,7 +60,10 @@ async def test_async(client_count: int = 1, item_count: int = 1) -> None:
     await clients[0].indices.create(index_name)
 
     await asyncio.gather(
-        *[index_records(clients[i], item_count) for i in range(client_count)]
+        *[
+            index_records(clients[i], index_name, item_count)
+            for i in range(client_count)
+        ]
     )
 
     await clients[0].indices.refresh(index=index_name)
@@ -79,28 +81,31 @@ def test(item_count: int = 1, client_count: int = 1) -> None:
     loop.close()
 
 
+ITEM_COUNT = 100
+
+
 def test_1() -> None:
-    test(1, 32 * item_count)
+    test(1, 32 * ITEM_COUNT)
 
 
 def test_2() -> None:
-    test(2, 16 * item_count)
+    test(2, 16 * ITEM_COUNT)
 
 
 def test_4() -> None:
-    test(4, 8 * item_count)
+    test(4, 8 * ITEM_COUNT)
 
 
 def test_8() -> None:
-    test(8, 4 * item_count)
+    test(8, 4 * ITEM_COUNT)
 
 
 def test_16() -> None:
-    test(16, 2 * item_count)
+    test(16, 2 * ITEM_COUNT)
 
 
 def test_32() -> None:
-    test(32, item_count)
+    test(32, ITEM_COUNT)
 
 
 __benchmarks__ = [(test_1, test_8, "1 client vs. more clients (async)")]
diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py
index 0c69a102..bc891067 100644
--- a/benchmarks/bench_info_sync.py
+++ b/benchmarks/bench_info_sync.py
@@ -20,22 +20,6 @@
 
 from opensearchpy import OpenSearch
 
-host = "localhost"
-port = 9200
-auth = ("admin", "admin")
-request_count = 250
-
-
-root = logging.getLogger()
-# root.setLevel(logging.DEBUG)
-# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG)
-
-handler = logging.StreamHandler(sys.stdout)
-handler.setLevel(logging.DEBUG)
-formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
-handler.setFormatter(formatter)
-root.addHandler(handler)
-
 
 def get_info(client: Any, request_count: int) -> float:
     tt: float = 0
@@ -48,6 +32,22 @@ def get_info(client: Any, request_count: int) -> float:
 
 
 def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None:
+    host = "localhost"
+    port = 9200
+    auth = ("admin", "admin")
+
+    root = logging.getLogger()
+    # root.setLevel(logging.DEBUG)
+    # logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG)
+
+    handler = logging.StreamHandler(sys.stdout)
+    handler.setLevel(logging.DEBUG)
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    )
+    handler.setFormatter(formatter)
+    root.addHandler(handler)
+
     clients = []
     for i in range(client_count):
         clients.append(
@@ -76,24 +76,27 @@ def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None:
     print(f"latency={latency}")
 
 
+REQUEST_COUNT = 250
+
+
 def test_1() -> None:
-    test(1, 32 * request_count, 1)
+    test(1, 32 * REQUEST_COUNT, 1)
 
 
 def test_2() -> None:
-    test(2, 16 * request_count, 2)
+    test(2, 16 * REQUEST_COUNT, 2)
 
 
 def test_4() -> None:
-    test(4, 8 * request_count, 3)
+    test(4, 8 * REQUEST_COUNT, 3)
 
 
 def test_8() -> None:
-    test(8, 4 * request_count, 8)
+    test(8, 4 * REQUEST_COUNT, 8)
 
 
 def test_32() -> None:
-    test(32, request_count, 32)
+    test(32, REQUEST_COUNT, 32)
 
 
 __benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")]
diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py
index 004fa2e4..7b4695eb 100644
--- a/benchmarks/bench_sync.py
+++ b/benchmarks/bench_sync.py
@@ -21,24 +21,8 @@
 
 from opensearchpy import OpenSearch, Urllib3HttpConnection
 
-host = "localhost"
-port = 9200
-auth = ("admin", "admin")
-index_name = "test-index-sync"
-item_count = 1000
-root = logging.getLogger()
-# root.setLevel(logging.DEBUG)
-# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG)
-
-handler = logging.StreamHandler(sys.stdout)
-handler.setLevel(logging.DEBUG)
-formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
-handler.setFormatter(formatter)
-root.addHandler(handler)
-
-
-def index_records(client: Any, item_count: int) -> Any:
+def index_records(client: Any, index_name: str, item_count: int) -> Any:
     tt = 0
     for n in range(10):
         data: Any = []
@@ -65,6 +49,23 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None:
+    host = "localhost"
+    port = 9200
+    auth = ("admin", "admin")
+    index_name = "test-index-sync"
+
+    root = logging.getLogger()
+    # root.setLevel(logging.DEBUG)
+    # logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG)
+
+    handler = logging.StreamHandler(sys.stdout)
+    handler.setLevel(logging.DEBUG)
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    )
+    handler.setFormatter(formatter)
+    root.addHandler(handler)
+
     clients = []
     for i in range(client_count):
         clients.append(
@@ -96,7 +97,8 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None:
     threads = []
     for thread_id in range(thread_count):
         thread = ThreadWithReturnValue(
-            target=index_records, args=[clients[thread_id % len(clients)], item_count]
+            target=index_records,
+            args=[clients[thread_id % len(clients)], index_name, item_count],
         )
         threads.append(thread)
         thread.start()
@@ -113,24 +115,27 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None:
     print(f"{count}, latency={latency}")
 
 
+ITEM_COUNT = 1000
+
+
 def test_1() -> None:
-    test(1, 32 * item_count, 1)
+    test(1, 32 * ITEM_COUNT, 1)
 
 
 def test_2() -> None:
-    test(2, 16 * item_count, 2)
+    test(2, 16 * ITEM_COUNT, 2)
 
 
 def test_4() -> None:
-    test(4, 8 * item_count, 3)
+    test(4, 8 * ITEM_COUNT, 3)
 
 
 def test_8() -> None:
-    test(8, 4 * item_count, 8)
+    test(8, 4 * ITEM_COUNT, 8)
 
 
 def test_32() -> None:
-    test(32, item_count, 32)
+    test(32, ITEM_COUNT, 32)
 
 
 __benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")]
diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock
index a598d001..d4c2e4a8 100644
--- a/benchmarks/poetry.lock
+++ b/benchmarks/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
 
 [[package]]
 name = "aiohttp"
@@ -183,101 +183,101 @@ files = [
 
 [[package]]
 name = "charset-normalizer"
-version = "3.3.0"
+version = "3.3.2"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -528,12 +528,12 @@ certifi = ">=2022.12.07" python-dateutil = "*" requests = ">=2.4.0,<3.0.0" six = "*" -urllib3 = ">=1.26.9" +urllib3 = ">=1.26.18" [package.extras] async = ["aiohttp (>=3,<4)"] -develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] -docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +develop = ["black", "botocore", "coverage (<8.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +docs = ["aiohttp (>=3,<4)", "myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] kerberos = ["requests_kerberos"] [package.source] @@ -556,71 +556,71 @@ plugins = ["importlib-metadata"] [[package]] name = "pyinstrument" -version = "4.6.0" +version = "4.6.1" description = "Call stack profiler for Python. Shows you why your code is slow!" 
optional = false python-versions = ">=3.7" files = [ - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:679b5397e3e6c0d6f56df50ba8c683543df4f1f7c1df2e2eb728e275bde2c85b"}, - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:18479ffa0c922695ba2befab29521b62bfe75debef48d818cea46262cee48a1e"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daba103955d0d0b37b8bc20a4e8cc6477e839ce5984478fcf3f7cee8318e9636"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d93451e9c7650629b0bc12caa7390f81d1a15835c07f7dc170e953d4684ed1e7"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01009a7b58a6f11bf5560c23848ea2881acac974b0841fe5d365ef154baabd6f"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:288ea44da6333dacc77b4ba2149dba3dc1e9fbbebd3d5dc51a66c20839d80ef3"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecc106213146dd90659a1483047b3a1c2e174fb190c0e109234e524a4651e377"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cd8ab30c8dcd1511e9b3b98f601f17f2c5c9df1d28f8298d215c63d68919bdc"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win32.whl", hash = "sha256:40e3656e6ace5a140880bd980a25f6a356c094c36e28ed1bf935d7349a78b1b6"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9623fc3fde47ae90ad5014737e37034b4abc3fbfb455b7b56cc095f9037d5af"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:beaaa3b647b3a4cbd34b71eacaa31e3eb90e1bf53e15ada3ac7e9df09d737239"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c69ab570609ac93b5f4ab2e5ccbf8add4f69a962b06307eea66ba65b5ad9d38"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5992748a74ec7ff445e4b56b5e316673c34b6cdbd3755111f7c023d8a141f001"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb1ba76c4e912cae159ab9729c7b31bb6d7fe8ed1f0fafce74484a4bb159c240"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:674868ebc3663b01d7d059a6f5cdeff6f18b49e217617720a5d645a6b55ead03"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:514a0ced357ff400988f599b0294d05e3b68468f9ab876f204bf12765f7fdb1b"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ccd1f5b4ad35c734dcf2d08d80b5b37205b4e84aa71fe76f95e43bd30c5eef9"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:611c6cd33f42f19e46d99eeef3b84a47d33fe34cdb0ce6e3635d2ee5038706a3"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win32.whl", hash = "sha256:d20b5cf79bca1b3d425a7362457621741393b1d5ce2d920583541b947bc8a368"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ecd8cf03b04dc1b7f151896228993c6aa0fa897cdd517ea127465bc1c826c5b5"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3d4bed520c0f689a75bca4951f6b7fbad96851e8461086c98e03eb726f8a412a"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b74745f1d22133da8d4a38dd0c78c02c00154a5b7683bdd5df56a7c7705a979b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ab698400e8401597e39c4816efa247f2b98c9b4e59e3ec25d534ae6887bd93"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de1a36a083b324dafe5e2880e5e04267a1983beb027f12c3dc361ddbe3acf9af"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8adc4f87d4289c1f04f19451b5133b8e307bd9b08c364c48e007ba663fefbf1b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08fbc75d3615be6259b7af0c173c7bc48acb6e7bd758678d54eb411ba2903052"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d86fea6ce117bcff642e24208eb573c00d78b4c2934eb9bd5f915751980cc9bd"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23a3b21373e0c8bf0d00dda79989fcab0bb1d30094f7b210d40d2226fe20e141"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win32.whl", hash = "sha256:a498c82d93621c5cf736e4660142ac0c3bbcb7b059bcbd4278a6364037128656"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:9116154446b9999f6524e9db29310aee6476a5a471c276928f2b46b6655a2dcc"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704c6d38abef8fca2e1085756c9574ea180f7ac866aab6943b483152c2828c2a"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbebdc11d4fc6f3123c046d84db88c7f605d53247e3f357314d0c5775d1beaf4"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7a7bae4cce5f8d084153857cedbce29ca8274c9924884d0461a5db48619c5d"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03289b10715e261a5c33b267d0a430d1b408f929922fde0a9fd311835c60351b"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7f83544ff9abfacdf64b39498ca3dcd454956e44aedb5f67626b7212291c9160"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:40640f02fe7865540e8a1e51bf7f9d2403e3364c3b7edfdb9dae5eb5596811da"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f3719464888d7303e1081996bc56ab75ef5cdf7ef69ccbb7b29f48eb37d8f8b9"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:46e16de6bd3b74ef01b6457d862fee751515315edb5e9283205e45299a29ac49"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9ded87ae11cb0a95a767c817908833ec0821fe0e81650968b201a031edf4bc15"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8bf16e459a868d9dbaacff4f0a0acd6ad78ce36f2aceabf21e9fd0c3b6aca0d4"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb83e445795431c3d867b298c0583ee27717bbc50e5120a4c98575c979ab3ab8"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29072b1be183e173d7b0f12caf29f8717d273afbf34df950f5fa0d98127cd3fb"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09502af2a383c59e5a0d3bebfab7e5845f79122348358e9e52b2b0187db84a44"}, - 
{file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23c982eb9c4d2f8fe553dacb9bdc0991170a0998b94c84f75c2a052e8af4c74"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f7a38ef482f2151393e729c5582191e4ab05f0ed1fa56b16c2377ff3129107af"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e983e16c2fdfb752387133380859c3414e119e41c14f39f5f869f29dcf6e995c"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d00c87e5cea48a562d67f0436999463b7989cff2e4c196b0e8ba06d515f191a9"}, - {file = "pyinstrument-4.6.0-cp38-cp38-win32.whl", hash = "sha256:a24c95cabf2ca5d79b62dbc8ff17749768b8aafd777841352f59f4ffd6688782"}, - {file = "pyinstrument-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3d88b66dbbcdc6e4c57bd8574ad9d096cd23285eee0f4a5cf74f0e0df6aa190"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2bcfec45cdbb9edf6d5853debac4a792de589e621be07a71dc76acb36e144a3a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e790515a22844bbccaa388c7715b037c45a8d0155c4a6f2990659998a8920501"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93a30e0d93633a28d4adcf7d7e2d158d6331809b95c2c4a155da17ea1e43eaa3"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa554eb8ef1c54849dbf480965b073f39b39b517e466ce241808a00398f9742a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e289898c644cbbb61d931bbcb6505e2a279ad1122612c9098bfb0958ebf5764"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20ce0f1612a019888a6b94fa7f1e7862842f0b5219282e3354d5b35aceb363f6"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4935f3cdb9062fceac65c50de76f07e05cf630bd3a9c663fedc9e88b5efe7d7c"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dc9c4577ef4b06ae1592c920d0a4f0f0db587a16f530c629ad93e125bc79ebb7"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win32.whl", hash = "sha256:3ec6b04d8cfb34aec48de7fa77aeb919e8e7e19909740ab7a5553339f6f4c53a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a6d2e5c15f989629fac41536ec2ca1fe81359fadf4dadf2ff24fe96b389f6df"}, - {file = "pyinstrument-4.6.0.tar.gz", hash = "sha256:3e509e879c853dbc5fdc1757f0cfdbf8bee899c80f53d504a7df28898f0fa8ed"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73476e4bc6e467ac1b2c3c0dd1f0b71c9061d4de14626676adfdfbb14aa342b4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4d1da8efd974cf9df52ee03edaee2d3875105ddd00de35aa542760f7c612bdf7"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507be1ee2f2b0c9fba74d622a272640dd6d1b0c9ec3388b2cdeb97ad1e77125f"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cee6de08eb45754ef4f602ce52b640d1c535d934a6a8733a974daa095def37"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c7873e8cec92321251fdf894a72b3c78f4c5c20afdd1fef0baf9042ec843bb04"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a242f6cac40bc83e1f3002b6b53681846dfba007f366971db0bf21e02dbb1903"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:97c9660cdb4bd2a43cf4f3ab52cffd22f3ac9a748d913b750178fb34e5e39e64"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e304cd0723e2b18ada5e63c187abf6d777949454c734f5974d64a0865859f0f4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win32.whl", hash = "sha256:cee21a2d78187dd8a80f72f5d0f1ddb767b2d9800f8bb4d94b6d11f217c22cdb"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:2000712f71d693fed2f8a1c1638d37b7919124f367b37976d07128d49f1445eb"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a366c6f3dfb11f1739bdc1dee75a01c1563ad0bf4047071e5e77598087df457f"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6be327be65d934796558aa9cb0f75ce62ebd207d49ad1854610c97b0579ad47"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e160d9c5d20d3e4ef82269e4e8b246ff09bdf37af5fb8cb8ccca97936d95ad6"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ffbf56605ef21c2fcb60de2fa74ff81f417d8be0c5002a407e414d6ef6dee43"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92cc4924596d6e8f30a16182bbe90893b1572d847ae12652f72b34a9a17c24a"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f4b48a94d938cae981f6948d9ec603bab2087b178d2095d042d5a48aabaecaab"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7a386392275bdef4a1849712dc5b74f0023483fca14ef93d0ca27d453548982"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:871b131b83e9b1122f2325061c68ed1e861eebcb568c934d2fb193652f077f77"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win32.whl", hash = "sha256:8d8515156dd91f5652d13b5fcc87e634f8fe1c07b68d1d0840348cdd50bf5ace"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb868fbe089036e9f32525a249f4c78b8dc46967612393f204b8234f439c9cc4"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a18cd234cce4f230f1733807f17a134e64a1f1acabf74a14d27f583cf2b183df"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:574cfca69150be4ce4461fb224712fbc0722a49b0dc02fa204d02807adf6b5a0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e02cf505e932eb8ccf561b7527550a67ec14fcae1fe0e25319b09c9c166e914"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832fb2acef9d53701c1ab546564c45fb70a8770c816374f8dd11420d399103c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb57e9607545623ebe462345b3d0c4caee0125d2d02267043ece8aca8f4ea0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9be89e7419bcfe8dd6abb0d959d6d9c439c613a4a873514c43d16b48dae697c9"}, + {file = 
"pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:476785cfbc44e8e1b1ad447398aa3deae81a8df4d37eb2d8bbb0c404eff979cd"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e9cebd90128a3d2fee36d3ccb665c1b9dce75261061b2046203e45c4a8012d54"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win32.whl", hash = "sha256:1d0b76683df2ad5c40eff73607dc5c13828c92fbca36aff1ddf869a3c5a55fa6"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:c4b7af1d9d6a523cfbfedebcb69202242d5bd0cb89c4e094cc73d5d6e38279bd"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79ae152f8c6a680a188fb3be5e0f360ac05db5bbf410169a6c40851dfaebcce9"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cad2745964c174c65aa75f1bf68a4394d1b4d28f33894837cfd315d1e836f0"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb81f66f7f94045d723069cf317453d42375de9ff3c69089cf6466b078ac1db4"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab30ae75969da99e9a529e21ff497c18fdf958e822753db4ae7ed1e67094040"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f36cb5b644762fb3c86289324bbef17e95f91cd710603ac19444a47f638e8e96"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8b45075d9dbbc977dbc7007fb22bb0054c6990fbe91bf48dd80c0b96c6307ba7"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:475ac31477f6302e092463896d6a2055f3e6abcd293bad16ff94fc9185308a88"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:29172ab3d8609fdf821c3f2562dc61e14f1a8ff5306607c32ca743582d3a760e"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:bd176f297c99035127b264369d2bb97a65255f65f8d4e843836baf55ebb3cee4"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:23e9b4526978432e9999021da9a545992cf2ac3df5ee82db7beb6908fc4c978c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2dbcaccc9f456ef95557ec501caeb292119c24446d768cb4fb43578b0f3d572c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2097f63c66c2bc9678c826b9ff0c25acde3ed455590d9dcac21220673fe74fbf"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:205ac2e76bd65d61b9611a9ce03d5f6393e34ec5b41dd38808f25d54e6b3e067"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f414ddf1161976a40fc0a333000e6a4ad612719eac0b8c9bb73f47153187148"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65e62ebfa2cd8fb57eda90006f4505ac4c70da00fc2f05b6d8337d776ea76d41"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d96309df4df10be7b4885797c5f69bb3a89414680ebaec0722d8156fde5268c3"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f3d1ad3bc8ebb4db925afa706aa865c4bfb40d52509f143491ac0df2440ee5d2"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win32.whl", hash = "sha256:dc37cb988c8854eb42bda2e438aaf553536566657d157c4473cc8aad5692a779"}, + {file = 
"pyinstrument-4.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:2cd4ce750c34a0318fc2d6c727cc255e9658d12a5cf3f2d0473f1c27157bdaeb"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ca95b21f022e995e062b371d1f42d901452bcbedd2c02f036de677119503355"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ac1e1d7e1f1b64054c4eb04eb4869a7a5eef2261440e73943cc1b1bc3c828c18"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0711845e953fce6ab781221aacffa2a66dbc3289f8343e5babd7b2ea34da6c90"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b7d28582017de35cb64eb4e4fa603e753095108ca03745f5d17295970ee631f"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7be57db08bd366a37db3aa3a6187941ee21196e8b14975db337ddc7d1490649d"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a0ac0f56860398d2628ce389826ce83fb3a557d0c9a2351e8a2eac6eb869983"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a9045186ff13bc826fef16be53736a85029aae3c6adfe52e666cad00d7ca623b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6c4c56b6eab9004e92ad8a48bb54913fdd71fc8a748ae42a27b9e26041646f8b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win32.whl", hash = "sha256:37e989c44b51839d0c97466fa2b623638b9470d56d79e329f359f0e8fa6d83db"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:5494c5a84fee4309d7d973366ca6b8b9f8ba1d6b254e93b7c506264ef74f2cef"}, + {file = "pyinstrument-4.6.1.tar.gz", hash = "sha256:f4731b27121350f5a983d358d2272fe3df2f538aed058f57217eef7801a89288"}, ] [package.extras] @@ -667,13 +667,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.6.0" +version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, - {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, ] [package.dependencies] @@ -723,13 +723,13 @@ files = [ [[package]] name = "urllib3" -version = "2.0.6" +version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, - {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, ] [package.extras] diff --git a/benchmarks/pyproject.toml b/benchmarks/pyproject.toml index c0c82142..0c4019db 100644 --- a/benchmarks/pyproject.toml +++ b/benchmarks/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "package" +name = "opensearch-py-benchmarks" version = "0.1.0" description = "OpenSearch Python client benchmarks." authors = ["Daniel Doubrovkine "] diff --git a/guides/async.md b/guides/async.md index 5de0971f..a3b4be66 100644 --- a/guides/async.md +++ b/guides/async.md @@ -11,7 +11,7 @@ # Asynchronous I/O -This client supports asynchronous I/O that improves performance and increases throughput. See [hello-async.py](../samples/hello/hello-async.py) or [knn-async-basics.py](../samples/knn/knn-async-basics.py) for a working asynchronous sample. +This client supports asynchronous I/O that improves performance and increases throughput. See [hello_async.py](../samples/hello/hello_async.py) or [knn_async_basics.py](../samples/knn/knn_async_basics.py) for a working asynchronous sample. ## Setup diff --git a/guides/bulk.md b/guides/bulk.md index 251be4f8..52057efd 100644 --- a/guides/bulk.md +++ b/guides/bulk.md @@ -10,7 +10,7 @@ The [Bulk API](https://opensearch.org/docs/latest/api-reference/document-apis/bu ## Line-Delimited JSON -The `bulk` API accepts line-delimited JSON. This method requires the caller to evaluate the return value and parse errors in the case of a failure or partial success. See [samples/bulk/bulk-ld.py](../samples/bulk/bulk-ld.py) for a working sample. +The `bulk` API accepts line-delimited JSON. This method requires the caller to evaluate the return value and parse errors in the case of a failure or partial success. See [samples/bulk/bulk_ld.py](../samples/bulk/bulk_ld.py) for a working sample. ```python from opensearchpy import OpenSearch @@ -33,7 +33,7 @@ else: print(f"Bulk-inserted {len(rc['items'])} items.") ``` -The client can also serialize an array of data into bulk-delimited JSON for you. See [samples/bulk/bulk-array.py](../samples/bulk/bulk-array.py) for a working sample. +The client can also serialize an array of data into bulk-delimited JSON for you. See [samples/bulk/bulk_array.py](../samples/bulk/bulk_array.py) for a working sample. ```python data = [ @@ -56,7 +56,7 @@ else: ## Bulk Helper -A helper can generate the line-delimited JSON for you from a Python array that contains `_index` and `_id` fields, and parse errors. The `helpers.bulk` implementation will raise `BulkIndexError` if any error occurs. This may indicate a partially successful result. See [samples/bulk/bulk-helpers.py](../samples/bulk/bulk-helpers.py) for a working sample. +A helper can generate the line-delimited JSON for you from a Python array that contains `_index` and `_id` fields, and parse errors. The `helpers.bulk` implementation will raise `BulkIndexError` if any error occurs. This may indicate a partially successful result. See [samples/bulk/bulk_helpers.py](../samples/bulk/bulk_helpers.py) for a working sample. 
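As a rough sketch of the `helpers.bulk` flow described above (the host, index name, and documents here are illustrative, not the repository's exact sample):

```python
from opensearchpy import OpenSearch, helpers

# Hypothetical local cluster; adjust the host and credentials for your setup.
client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

# Each action carries its routing metadata (_index, _id) next to the document fields.
actions = [
    {"_index": "movies", "_id": "1", "title": "Moneyball", "year": 2011},
    {"_index": "movies", "_id": "2", "title": "Interstellar", "year": 2014},
]

try:
    # helpers.bulk generates the line-delimited JSON, sends it, and parses errors,
    # returning (number of successes, list of per-item errors).
    succeeded, errors = helpers.bulk(client, actions)
    print(f"Bulk-inserted {succeeded} items.")
except helpers.BulkIndexError as e:
    # Raised if any item fails; this may still indicate a partially successful result.
    print(f"Bulk errors: {e}")
```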
```python from opensearchpy import OpenSearch, helpers diff --git a/guides/plugins/knn.md b/guides/plugins/knn.md index a7775c88..8eea69c6 100644 --- a/guides/plugins/knn.md +++ b/guides/plugins/knn.md @@ -3,7 +3,7 @@ - [Create an Index](#create-an-index) - [Index Vectors](#index-vectors) - [Search for Nearest Neighbors](#search-for-nearest-neighbors) - - [Approximate k-NN with a Boolean Filter](#approximate-k-nn-with-a-boolean-filter) + - [Approximate k-NN with a Boolean Filter](#approximate-k-nn-with-a-boolean_filter) - [Approximate k-NN with an Efficient Filter](#approximate-k-nn-with-an-efficient-filter) # k-NN Plugin @@ -12,10 +12,10 @@ Short for k-nearest neighbors, the k-NN plugin enables users to search for the k ## Basic Approximate k-NN -In the following example we create a 5-dimensional k-NN index with random data. You can find a synchronous version of this working sample in [samples/knn/knn-basics.py](../../samples/knn/knn-basics.py) and an asynchronous one in [samples/knn/knn-async-basics.py](../../samples/knn/knn-async-basics.py). +In the following example we create a 5-dimensional k-NN index with random data. You can find a synchronous version of this working sample in [samples/knn/knn_basics.py](../../samples/knn/knn_basics.py) and an asynchronous one in [samples/knn/knn_async_basics.py](../../samples/knn/knn_async_basics.py). ```bash -$ poetry run python knn/knn-basics.py +$ poetry run python knn/knn_basics.py Searching for [0.61, 0.05, 0.16, 0.75, 0.49] ... {'_index': 'my-index', '_id': '3', '_score': 0.9252405, '_source': {'values': [0.64, 0.3, 0.27, 0.68, 0.51]}} @@ -93,10 +93,10 @@ for hit in results["hits"]["hits"]: ## Approximate k-NN with a Boolean Filter -In [the boolean-filter.py sample](../../samples/knn/knn-boolean-filter.py) we create a 5-dimensional k-NN index with random data and a `metadata` field that contains a book genre (e.g. `fiction`). The search query is a k-NN search filtered by genre. The filter clause is outside the k-NN query clause and is applied after the k-NN search. +In [the knn_boolean_filter.py sample](../../samples/knn/knn_boolean_filter.py) we create a 5-dimensional k-NN index with random data and a `metadata` field that contains a book genre (e.g. `fiction`). The search query is a k-NN search filtered by genre. The filter clause is outside the k-NN query clause and is applied after the k-NN search. ```bash -$ poetry run python knn/knn-boolean-filter.py +$ poetry run python knn/knn_boolean_filter.py Searching for [0.08, 0.42, 0.04, 0.76, 0.41] with the 'romance' genre ... @@ -106,10 +106,10 @@ Searching for [0.08, 0.42, 0.04, 0.76, 0.41] with the 'romance' genre ... ## Approximate k-NN with an Efficient Filter -In [the lucene-filter.py sample](../../samples/knn/knn-efficient-filter.py) we implement the example in [the k-NN documentation](https://opensearch.org/docs/latest/search-plugins/knn/filter-search-knn/), which creates an index that uses the Lucene engine and HNSW as the method in the mapping, containing hotel location and parking data, then search for the top three hotels near the location with the coordinates `[5, 4]` that are rated between 8 and 10, inclusive, and provide parking. 
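In rough form, the filtered search that sample implements looks like the following — a sketch assuming the `hotels-index` mapping and the `location`, `rating`, and `parking` fields visible in the output below; the shipped sample also creates the index and may differ in details:

```python
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["https://admin:admin@localhost:9200"], verify_certs=False)

# With Lucene efficient filtering, the filter sits inside the knn clause, so it is
# applied during the approximate search rather than to its results afterwards.
query = {
    "size": 3,
    "query": {
        "knn": {
            "location": {
                "vector": [5, 4],
                "k": 3,
                "filter": {
                    "bool": {
                        "must": [
                            {"range": {"rating": {"gte": 8, "lte": 10}}},
                            {"term": {"parking": "true"}},
                        ]
                    }
                },
            }
        }
    },
}

results = client.search(index="hotels-index", body=query)
for hit in results["hits"]["hits"]:
    print(hit)
```

Contrast this with the boolean-filter variant above, where the filter clause lives outside the `knn` query and only trims the k candidates after the nearest-neighbor search has run.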
+In [the knn_efficient_filter.py sample](../../samples/knn/knn_efficient_filter.py) we implement the example in [the k-NN documentation](https://opensearch.org/docs/latest/search-plugins/knn/filter-search-knn/), which creates an index that uses the Lucene engine and HNSW as the method in the mapping, containing hotel location and parking data, then search for the top three hotels near the location with the coordinates `[5, 4]` that are rated between 8 and 10, inclusive, and provide parking. ```bash -$ poetry run python knn/knn-efficient-filter.py +$ poetry run python knn/knn_efficient_filter.py {'_index': 'hotels-index', '_id': '3', '_score': 0.72992706, '_source': {'location': [4.9, 3.4], 'parking': 'true', 'rating': 9}} {'_index': 'hotels-index', '_id': '6', '_score': 0.3012048, '_source': {'location': [6.4, 3.4], 'parking': 'true', 'rating': 9}} diff --git a/noxfile.py b/noxfile.py index 510d30c7..be71ac0b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -60,11 +60,12 @@ def test(session: Any) -> None: @nox.session(python=["3.7"]) # type: ignore def format(session: Any) -> None: + session.install(".") session.install("black", "isort") - session.run("isort", "--profile=black", *SOURCE_FILES) - session.run("black", "--target-version=py33", *SOURCE_FILES) - session.run("python", "utils/license-headers.py", "fix", *SOURCE_FILES) + session.run("isort", *SOURCE_FILES) + session.run("black", *SOURCE_FILES) + session.run("python", "utils/license_headers.py", "fix", *SOURCE_FILES) lint(session) @@ -76,6 +77,7 @@ def lint(session: Any) -> None: "black", "mypy", "isort", + "pylint", "types-requests", "types-six", "types-simplejson", @@ -85,10 +87,11 @@ def lint(session: Any) -> None: "types-pytz", ) - session.run("isort", "--check", "--profile=black", *SOURCE_FILES) - session.run("black", "--target-version=py33", "--check", *SOURCE_FILES) + session.run("isort", "--check", *SOURCE_FILES) + session.run("black", "--check", *SOURCE_FILES) session.run("flake8", *SOURCE_FILES) - session.run("python", "utils/license-headers.py", "check", *SOURCE_FILES) + session.run("pylint", *SOURCE_FILES) + session.run("python", "utils/license_headers.py", "check", *SOURCE_FILES) # Workaround to make '-r' to still work despite uninstalling aiohttp below. session.run("python", "-m", "pip", "install", "aiohttp") @@ -117,5 +120,5 @@ def docs(session: Any) -> None: @nox.session() # type: ignore def generate(session: Any) -> None: session.install("-rdev-requirements.txt") - session.run("python", "utils/generate-api.py") + session.run("python", "utils/generate_api.py") format(session) diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index fa8b5f04..dfe26775 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from __future__ import unicode_literals diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index 4310511c..fdd3dec2 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 905853e9..4838b5b4 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. 
+# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index 6bc9a343..44744de4 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index a4ef8b5b..9ec3b130 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index 2f8cff27..81f44ec5 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index 36146fad..b425412a 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index 8a72f41c..7e021106 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -8,14 +8,13 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. 
+# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index dc893f86..c4cca167 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -8,14 +8,13 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index 97ffec72..2519d633 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 39aefe93..9484c500 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ import warnings diff --git a/opensearchpy/_async/client/utils.py b/opensearchpy/_async/client/utils.py index 45ad552b..0c2235aa 100644 --- a/opensearchpy/_async/client/utils.py +++ b/opensearchpy/_async/client/utils.py @@ -26,9 +26,9 @@ # under the License. -from ...client.utils import ( # noqa +from ...client.utils import NamespacedClient # noqa +from ...client.utils import ( SKIP_IN_PATH, - NamespacedClient, _bulk_body, _escape, _make_path, diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index 4f2a9918..42d63dfd 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -260,7 +260,7 @@ def search(self, using: Any = None) -> Any: using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using: Any = None) -> Any: + def updateByQuery(self, using: Any = None) -> Any: # pylint: disable=invalid-name """ Return a :class:`~opensearchpy.AsyncUpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index a9c71552..e6f7021b 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from __future__ import unicode_literals diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 91adbf35..8596a6f3 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -26,14 +26,13 @@ # under the License. 
-# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index f2770f2d..2686bbfa 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index 8617708e..31b777e7 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. 
See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index 7cdc7e57..9dc12f38 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index 4bf558b9..13ba1a69 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 6a7b5db1..9ad6534a 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py index a019a99c..799c6aa1 100644 --- a/opensearchpy/client/remote_store.py +++ b/opensearchpy/client/remote_store.py @@ -8,14 +8,13 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 6d1574ea..a10ec655 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -8,14 +8,13 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. 
See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index fe6536fa..50f67357 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ from typing import Any diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index 7e675233..29361e2b 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -26,14 +26,13 @@ # under the License. -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. 
+# -----------------------------------------------------------------------------------------+ import warnings diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index cb8bc7d7..ca874943 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -32,7 +32,7 @@ from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse string_types = str, bytes -map = map +map = map # pylint: disable=invalid-name def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index a2774c15..eda6adfe 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -46,9 +46,9 @@ # create the opensearchpy.trace logger, but only set propagate to False if the # logger hasn't already been configured -_tracer_already_configured = "opensearchpy.trace" in logging.Logger.manager.loggerDict +TRACER_ALREADY_CONFIGURED = "opensearchpy.trace" in logging.Logger.manager.loggerDict tracer = logging.getLogger("opensearchpy.trace") -if not _tracer_already_configured: +if not TRACER_ALREADY_CONFIGURED: tracer.propagate = False _WARNING_RE = re.compile(r"\"([^\"]*)\"") diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py index 59795614..4e06e7d9 100644 --- a/opensearchpy/helpers/aggs.py +++ b/opensearchpy/helpers/aggs.py @@ -33,7 +33,9 @@ from .utils import DslBase -def A(name_or_agg: Any, filter: Any = None, **params: Any) -> Any: +def A( # pylint: disable=invalid-name + name_or_agg: Any, filter: Any = None, **params: Any +) -> Any: if filter is not None: if name_or_agg != "filter": raise ValueError( diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py index 00452f86..f0885aa5 100644 --- a/opensearchpy/helpers/function.py +++ b/opensearchpy/helpers/function.py @@ -31,7 +31,7 @@ from .utils import DslBase -def SF(name_or_sf: Any, **params: Any) -> Any: +def SF(name_or_sf: Any, **params: Any) -> Any: # pylint: disable=invalid-name # {"script_score": {"script": "_score"}, "filter": {}} if isinstance(name_or_sf, collections_abc.Mapping): if params: diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index 3fbb475a..3b6185b4 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -279,7 +279,9 @@ def search(self, using: Optional[OpenSearch] = None) -> Search: using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using: Optional[OpenSearch] = None) -> UpdateByQuery: + def updateByQuery( # pylint: disable=invalid-name + self, using: Optional[OpenSearch] = None + ) -> UpdateByQuery: """ Return a :class:`~opensearchpy.UpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index e299f94a..b7861f78 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -35,7 +35,9 @@ from .utils import DslBase -def Q(name_or_query: Any = "match_all", **params: Any) -> Any: +def Q( # pylint: disable=invalid-name + name_or_query: Any = "match_all", **params: Any +) -> Any: # {"match": {"title": "python"}} if isinstance(name_or_query, collections_abc.Mapping): if params: diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index 562f82e2..a8eb3859 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py 
diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py
index 562f82e2..a8eb3859 100644
--- a/samples/advanced_index_actions/advanced_index_actions_sample.py
+++ b/samples/advanced_index_actions/advanced_index_actions_sample.py
@@ -18,7 +18,7 @@
 # urllib3.disable_warnings()
 
 
-def test_opensearch_examples() -> None:
+def main() -> None:
     # Set up
     client = OpenSearch(
         hosts=["https://localhost:9200"],
@@ -101,4 +101,4 @@ def test_opensearch_examples() -> None:
 
 
 if __name__ == "__main__":
-    test_opensearch_examples()
+    main()
diff --git a/samples/aws/README.md b/samples/aws/README.md
index 17ad4ee0..bdb30c2b 100644
--- a/samples/aws/README.md
+++ b/samples/aws/README.md
@@ -11,7 +11,7 @@ export AWS_REGION=us-west-2
 export SERVICE=es # use "aoss" for OpenSearch Serverless.
 export ENDPOINT=https://....us-west-2.es.amazonaws.com
 
-poetry run aws/search-urllib.py
+poetry run aws/search_urllib3.py
 ```
 
 This will output the version of OpenSearch and a search result.
diff --git a/samples/aws/search-requests.py b/samples/aws/search-requests.py
deleted file mode 100644
index 0af366f0..00000000
--- a/samples/aws/search-requests.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-
-import logging
-from os import environ
-from time import sleep
-from urllib.parse import urlparse
-
-from boto3 import Session
-
-from opensearchpy import OpenSearch, RequestsAWSV4SignerAuth, RequestsHttpConnection
-
-# verbose logging
-logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)
-
-# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com
-url = urlparse(environ["ENDPOINT"])
-region = environ.get("AWS_REGION", "us-east-1")
-service = environ.get("SERVICE", "es")
-
-credentials = Session().get_credentials()
-
-auth = RequestsAWSV4SignerAuth(credentials, region, service)
-
-client = OpenSearch(
-    hosts=[{"host": url.netloc, "port": url.port or 443}],
-    http_auth=auth,
-    use_ssl=True,
-    verify_certs=True,
-    connection_class=RequestsHttpConnection,
-    timeout=30,
-)
-
-# TODO: remove when OpenSearch Serverless adds support for /
-if service == "es":
-    info = client.info()
-    print(f"{info['version']['distribution']}: {info['version']['number']}")
-
-# create an index
-index = "movies"
-client.indices.create(index=index)
-
-try:
-    # index data
-    document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011}
-    client.index(index=index, body=document, id="1")
-
-    # wait for the document to index
-    sleep(1)
-
-    # search for the document
-    results = client.search(body={"query": {"match": {"director": "miller"}}})
-    for hit in results["hits"]["hits"]:
-        print(hit["_source"])
-
-    # delete the document
-    client.delete(index=index, id="1")
-finally:
-    # delete the index
-    client.indices.delete(index=index)
diff --git a/samples/aws/search-urllib3.py b/samples/aws/search-urllib3.py
deleted file mode 100644
index 534caf40..00000000
--- a/samples/aws/search-urllib3.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-
-# -*- coding: utf-8 -*-
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
- -import logging -from os import environ -from time import sleep -from urllib.parse import urlparse - -from boto3 import Session - -from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection - -# verbose logging -logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) - -# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com -url = urlparse(environ["ENDPOINT"]) -region = environ.get("AWS_REGION", "us-east-1") -service = environ.get("SERVICE", "es") - -credentials = Session().get_credentials() - -auth = Urllib3AWSV4SignerAuth(credentials, region, service) - -client = OpenSearch( - hosts=[{"host": url.netloc, "port": url.port or 443}], - http_auth=auth, - use_ssl=True, - verify_certs=True, - connection_class=Urllib3HttpConnection, - timeout=30, -) - -# TODO: remove when OpenSearch Serverless adds support for / -if service == "es": - info = client.info() - print(f"{info['version']['distribution']}: {info['version']['number']}") - -# create an index -index = "movies" -client.indices.create(index=index) - -try: - # index data - document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} - client.index(index=index, body=document, id="1") - - # wait for the document to index - sleep(1) - - # search for the document - results = client.search(body={"query": {"match": {"director": "miller"}}}) - for hit in results["hits"]["hits"]: - print(hit["_source"]) - - # delete the document - client.delete(index=index, id="1") -finally: - # delete the index - client.indices.delete(index=index) diff --git a/samples/aws/search_requests.py b/samples/aws/search_requests.py new file mode 100644 index 00000000..84c7f47a --- /dev/null +++ b/samples/aws/search_requests.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ +import logging +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session + +from opensearchpy import OpenSearch, RequestsAWSV4SignerAuth, RequestsHttpConnection + + +def main() -> None: + # verbose logging + logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) + + # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com + url = urlparse(environ["ENDPOINT"]) + region = environ.get("AWS_REGION", "us-east-1") + service = environ.get("SERVICE", "es") + + credentials = Session().get_credentials() + + auth = RequestsAWSV4SignerAuth(credentials, region, service) + + client = OpenSearch( + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + timeout=30, + ) + + # TODO: remove when OpenSearch Serverless adds support for / + if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + + # create an index + index = "movies" + client.indices.create(index=index) + + try: + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) + + # delete the document + client.delete(index=index, id="1") + finally: + # delete the index + client.indices.delete(index=index) + + +if __name__ == "__main__": + main() diff --git a/samples/aws/search_urllib3.py b/samples/aws/search_urllib3.py new file mode 100644 index 00000000..00581683 --- /dev/null +++ b/samples/aws/search_urllib3.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
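The two AWS samples are deliberately parallel: `search_requests.py` uses the requests transport and `search_urllib3.py` the urllib3 transport, and in both the SigV4 signer class is paired with the connection class from the same HTTP library. A condensed sketch of the pairing rule as these samples apply it; the `PAIRINGS` mapping is purely illustrative, not part of the library:

```python
from opensearchpy import (
    RequestsAWSV4SignerAuth,
    RequestsHttpConnection,
    Urllib3AWSV4SignerAuth,
    Urllib3HttpConnection,
)

# each signer is used together with the connection class from the same
# HTTP library, as in the two samples in this patch; this dict only
# illustrates the pairing and is not an opensearch-py API
PAIRINGS = {
    RequestsHttpConnection: RequestsAWSV4SignerAuth,
    Urllib3HttpConnection: Urllib3AWSV4SignerAuth,
}
```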
+ +import logging +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session + +from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection + + +def main() -> None: + # verbose logging + logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) + + # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com + url = urlparse(environ["ENDPOINT"]) + region = environ.get("AWS_REGION", "us-east-1") + service = environ.get("SERVICE", "es") + + credentials = Session().get_credentials() + + auth = Urllib3AWSV4SignerAuth(credentials, region, service) + + client = OpenSearch( + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=Urllib3HttpConnection, + timeout=30, + ) + + # TODO: remove when OpenSearch Serverless adds support for / + if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + + # create an index + index = "movies" + client.indices.create(index=index) + + try: + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) + + # delete the document + client.delete(index=index, id="1") + finally: + # delete the index + client.indices.delete(index=index) + + +if __name__ == "__main__": + main() diff --git a/samples/bulk/bulk-array.py b/samples/bulk/bulk-array.py deleted file mode 100755 index 5191a291..00000000 --- a/samples/bulk/bulk-array.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- - -import os -from typing import Any - -from opensearchpy import OpenSearch - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "mappings": { - "properties": { - "value": {"type": "float"}, - } - } - }, - ) - -# index data -data: Any = [] -for i in range(100): - data.append({"index": {"_index": index_name, "_id": i}}) - data.append({"value": i}) - -rc = client.bulk(data) -if rc["errors"]: - print("There were errors:") - for item in rc["items"]: - print(f"{item['index']['status']}: {item['index']['error']['type']}") -else: - print(f"Bulk-inserted {len(rc['items'])} items.") - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/bulk/bulk-helpers.py b/samples/bulk/bulk-helpers.py deleted file mode 100755 index 678b2c09..00000000 --- a/samples/bulk/bulk-helpers.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -import os -from typing import Any - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "mappings": { - "properties": { - "value": {"type": "float"}, - } - } - }, - ) - -# index data -data = [] -for i in range(100): - data.append({"_index": index_name, "_id": i, "value": i}) - -# serialized bulk raising an exception on error -rc = helpers.bulk(client, data) -print(f"Bulk-inserted {rc[0]} items (bulk).") - -# parallel bulk with explicit error checking -succeeded = [] -failed = [] -for success, item in helpers.parallel_bulk( - client, - actions=data, - chunk_size=10, - raise_on_error=False, - raise_on_exception=False, - max_chunk_bytes=20 * 1024 * 1024, - request_timeout=60, -): - if success: - succeeded.append(item) - else: - failed.append(item) - -if len(failed) > 0: - print(f"There were {len(failed)} errors:") - for item in failed: - print(item["index"]["error"]) - -if len(succeeded) > 0: - print(f"Bulk-inserted {len(succeeded)} items (parallel_bulk).") - - -# streaming bulk with a data generator -def _generate_data() -> Any: - for i in range(100): - yield {"_index": index_name, "_id": i, "value": i} - - -succeeded = [] -failed = [] -for success, item in helpers.streaming_bulk(client, actions=_generate_data()): - if success: - succeeded.append(item) - else: - failed.append(item) - -if len(failed) > 0: - print(f"There were {len(failed)} errors:") - for item in failed: - print(item["index"]["error"]) - -if 
len(succeeded) > 0: - print(f"Bulk-inserted {len(succeeded)} items (streaming_bulk).") - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/bulk/bulk-ld.py b/samples/bulk/bulk-ld.py deleted file mode 100755 index fff0ae98..00000000 --- a/samples/bulk/bulk-ld.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -import json -import os - -from opensearchpy import OpenSearch - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "mappings": { - "properties": { - "value": {"type": "float"}, - } - } - }, - ) - -# index data -data = "" -for i in range(100): - data += json.dumps({"index": {"_index": index_name, "_id": i}}) + "\n" - data += json.dumps({"value": i}) + "\n" - -rc = client.bulk(data) -if rc["errors"]: - print("There were errors:") - for item in rc["items"]: - print(f"{item['index']['status']}: {item['index']['error']['type']}") -else: - print(f"Bulk-inserted {len(rc['items'])} items.") - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/bulk/bulk_array.py b/samples/bulk/bulk_array.py new file mode 100755 index 00000000..e8ea6a09 --- /dev/null +++ b/samples/bulk/bulk_array.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
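The three bulk samples being renamed here differ mainly in how the request body is expressed: `bulk_array.py` sends a list of alternating action and source dicts, `bulk_ld.py` sends the same stream as newline-delimited JSON text, and `bulk_helpers.py` passes one flat action per document to `helpers.bulk`, which handles chunking and serialization. A compact sketch of the three shapes, with the action and document values taken from the samples (the variable names are illustrative):

```python
import json

index_name = "my-index"

# bulk_array.py: a list of alternating action and source dicts
array_body = [
    {"index": {"_index": index_name, "_id": 0}},
    {"value": 0},
]

# bulk_ld.py: the same stream rendered as newline-delimited JSON text
ld_body = (
    json.dumps({"index": {"_index": index_name, "_id": 0}})
    + "\n"
    + json.dumps({"value": 0})
    + "\n"
)

# bulk_helpers.py: one flat action per document, handed to
# helpers.bulk(client, actions)
helper_actions = [{"_index": index_name, "_id": 0, "value": 0}]
```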
+ + +import os +from typing import Any + +from opensearchpy import OpenSearch + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "mappings": { + "properties": { + "value": {"type": "float"}, + } + } + }, + ) + + # index data + data: Any = [] + for i in range(100): + data.append({"index": {"_index": index_name, "_id": i}}) + data.append({"value": i}) + + rc = client.bulk(data) + if rc["errors"]: + print("There were errors:") + for item in rc["items"]: + print(f"{item['index']['status']}: {item['index']['error']['type']}") + else: + print(f"Bulk-inserted {len(rc['items'])} items.") + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/bulk/bulk_helpers.py b/samples/bulk/bulk_helpers.py new file mode 100755 index 00000000..0468b0f5 --- /dev/null +++ b/samples/bulk/bulk_helpers.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +import os +from typing import Any + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "mappings": { + "properties": { + "value": {"type": "float"}, + } + } + }, + ) + + # index data + data = [] + for i in range(100): + data.append({"_index": index_name, "_id": i, "value": i}) + + # serialized bulk raising an exception on error + rc = helpers.bulk(client, data) + print(f"Bulk-inserted {rc[0]} items (bulk).") + + # parallel bulk with explicit error checking + succeeded = [] + failed = [] + for success, item in helpers.parallel_bulk( + client, + actions=data, + chunk_size=10, + raise_on_error=False, + raise_on_exception=False, + max_chunk_bytes=20 * 1024 * 1024, + request_timeout=60, + ): + if success: + succeeded.append(item) + else: + failed.append(item) + + if len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + + if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items (parallel_bulk).") + + # streaming bulk with a data generator + def _generate_data() -> Any: + for i in range(100): + yield {"_index": index_name, "_id": i, "value": i} + + succeeded = [] + failed = [] + for success, item in helpers.streaming_bulk(client, actions=_generate_data()): + if success: + succeeded.append(item) + else: + failed.append(item) + + if 
len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + + if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items (streaming_bulk).") + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/bulk/bulk_ld.py b/samples/bulk/bulk_ld.py new file mode 100755 index 00000000..0bf556fa --- /dev/null +++ b/samples/bulk/bulk_ld.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +import json +import os + +from opensearchpy import OpenSearch + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "mappings": { + "properties": { + "value": {"type": "float"}, + } + } + }, + ) + + # index data + data = "" + for i in range(100): + data += json.dumps({"index": {"_index": index_name, "_id": i}}) + "\n" + data += json.dumps({"value": i}) + "\n" + + rc = client.bulk(data) + if rc["errors"]: + print("There were errors:") + for item in rc["items"]: + print(f"{item['index']['status']}: {item['index']['error']['type']}") + else: + print(f"Bulk-inserted {len(rc['items'])} items.") + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py index 1d338da7..c21ae44a 100644 --- a/samples/document_lifecycle/document_lifecycle_sample.py +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -17,78 +17,89 @@ # urllib3.disable_warnings() -# Connect to OpenSearch -client = OpenSearch( - hosts=["https://localhost:9200"], - use_ssl=True, - verify_certs=False, - http_auth=("admin", "admin"), -) - -# Create an index -index = "movies" -if not client.indices.exists(index=index): - client.indices.create(index=index) - -# Create documents -client.index(index=index, id=1, body={"title": "Beauty and the Beast", "year": 1991}) -client.index( - index=index, - id=2, - body={"title": "Beauty and the Beast - Live Action", "year": 2017}, -) - -# Index a document -client.index(index=index, id=2, body={"title": "The Lion King", "year": 1994}) - -# Create a document with auto-generated ID -result = client.index(index=index, body={"title": "The Lion King 2", "year": 1998}) -print(result) - -# Get a document -result = client.get(index=index, id=1)["_source"] -print(result) - -# Get a document with _source includes -result = client.get(index=index, id=1, _source_includes=["title"])["_source"] -print(result) - -# Get a document with _source excludes -result = client.get(index=index, id=1, _source_excludes=["title"])["_source"] -print(result) - -# Get multiple documents -result = client.mget(index=index, body={"docs": [{"_id": 1}, {"_id": 2}]})["docs"] 
-print(result) - -# Check if a document exists -result = client.exists(index=index, id=1) -print(result) - -# Update a document -client.update(index=index, id=1, body={"doc": {"year": 1995}}) - -# Update a document using script -client.update(index=index, id=1, body={"script": {"source": "ctx._source.year += 5"}}) - -# Update multiple documents by query -client.update_by_query( - index=index, - body={ - "script": {"source": "ctx._source.year -= 1"}, - "query": {"range": {"year": {"gt": 2023}}}, - }, -) - -# Delete a document -client.delete(index=index, id=1) - -# Delete a document with ignore 404 -client.delete(index=index, id=1, ignore=404) - -# Delete multiple documents by query -client.delete_by_query(index=index, body={"query": {"range": {"year": {"gt": 2023}}}}) - -# Delete the index -client.indices.delete(index=index) -print("Deleted index!") +def main() -> None: + # Connect to OpenSearch + client = OpenSearch( + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), + ) + + # Create an index + index = "movies" + if not client.indices.exists(index=index): + client.indices.create(index=index) + + # Create documents + client.index( + index=index, id=1, body={"title": "Beauty and the Beast", "year": 1991} + ) + client.index( + index=index, + id=2, + body={"title": "Beauty and the Beast - Live Action", "year": 2017}, + ) + + # Index a document + client.index(index=index, id=2, body={"title": "The Lion King", "year": 1994}) + + # Create a document with auto-generated ID + result = client.index(index=index, body={"title": "The Lion King 2", "year": 1998}) + print(result) + + # Get a document + result = client.get(index=index, id=1)["_source"] + print(result) + + # Get a document with _source includes + result = client.get(index=index, id=1, _source_includes=["title"])["_source"] + print(result) + + # Get a document with _source excludes + result = client.get(index=index, id=1, _source_excludes=["title"])["_source"] + print(result) + + # Get multiple documents + result = client.mget(index=index, body={"docs": [{"_id": 1}, {"_id": 2}]})["docs"] + print(result) + + # Check if a document exists + result = client.exists(index=index, id=1) + print(result) + + # Update a document + client.update(index=index, id=1, body={"doc": {"year": 1995}}) + + # Update a document using script + client.update( + index=index, id=1, body={"script": {"source": "ctx._source.year += 5"}} + ) + + # Update multiple documents by query + client.update_by_query( + index=index, + body={ + "script": {"source": "ctx._source.year -= 1"}, + "query": {"range": {"year": {"gt": 2023}}}, + }, + ) + + # Delete a document + client.delete(index=index, id=1) + + # Delete a document with ignore 404 + client.delete(index=index, id=1, ignore=404) + + # Delete multiple documents by query + client.delete_by_query( + index=index, body={"query": {"range": {"year": {"gt": 2023}}}} + ) + + # Delete the index + client.indices.delete(index=index) + print("Deleted index!") + + +if __name__ == "__main__": + main() diff --git a/samples/hello/hello.py b/samples/hello/hello.py index 0b589c9d..a614f085 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -15,62 +15,68 @@ # connect to OpenSearch -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
-client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) +def main() -> None: + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. -info = client.info() -print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) -# create an index + info = client.info() + print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") -index_name = "test-index" + # create an index -index_body = {"settings": {"index": {"number_of_shards": 4}}} + index_name = "test-index" -response = client.indices.create(index_name, body=index_body) + index_body = {"settings": {"index": {"number_of_shards": 4}}} -print(response) + response = client.indices.create(index_name, body=index_body) -# add a document to the index + print(response) -document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} + # add a document to the index -id = "1" + document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} -response = client.index(index=index_name, body=document, id=id, refresh=True) + id = "1" -print(response) + response = client.index(index=index_name, body=document, id=id, refresh=True) -# search for a document + print(response) -q = "miller" + # search for a document -query = { - "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, -} + q = "miller" -response = client.search(body=query, index=index_name) + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + } -print(response) + response = client.search(body=query, index=index_name) -# delete the document + print(response) -response = client.delete(index=index_name, id=id) + # delete the document -print(response) + response = client.delete(index=index_name, id=id) -# delete the index + print(response) -response = client.indices.delete(index=index_name) + # delete the index -print(response) + response = client.indices.delete(index=index_name) + + print(response) + + +if __name__ == "__main__": + main() diff --git a/samples/hello/hello-async.py b/samples/hello/hello_async.py similarity index 100% rename from samples/hello/hello-async.py rename to samples/hello/hello_async.py diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py index 4fe580ac..ca0f8310 100644 --- a/samples/index_template/index_template_sample.py +++ b/samples/index_template/index_template_sample.py @@ -11,119 +11,127 @@ # GitHub history for details. from opensearchpy import OpenSearch -# Create a client instance -client = OpenSearch( - hosts=["https://localhost:9200"], - use_ssl=True, - verify_certs=False, - http_auth=("admin", "admin"), -) - -# You can create an index template to define default settings and mappings for indices of certain patterns. 
The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: -client.indices.put_index_template( - name="books", - body={ - "index_patterns": ["books-*"], - "priority": 1, - "template": { - "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}, - "mappings": { - "properties": { - "title": {"type": "text"}, - "author": {"type": "text"}, - "published_on": {"type": "date"}, - "pages": {"type": "integer"}, - } + +def main() -> None: + # Create a client instance + client = OpenSearch( + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), + ) + + # You can create an index template to define default settings and mappings for indices of certain patterns. + # The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: + client.indices.put_index_template( + name="books", + body={ + "index_patterns": ["books-*"], + "priority": 1, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}, + "mappings": { + "properties": { + "title": {"type": "text"}, + "author": {"type": "text"}, + "published_on": {"type": "date"}, + "pages": {"type": "integer"}, + } + }, }, }, - }, -) - -# Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: -client.indices.create(index="books-nonfiction") -print(client.indices.get(index="books-nonfiction")) - -# If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: -client.indices.put_index_template( - name="books", - body={ - "index_patterns": ["books-*"], - "priority": 1, - "template": { - "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + ) + + # Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. + # Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: + client.indices.create(index="books-nonfiction") + print(client.indices.get(index="books-nonfiction")) + + # If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. 
+ # The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: + client.indices.put_index_template( + name="books", + body={ + "index_patterns": ["books-*"], + "priority": 1, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + }, }, - }, -) - -client.indices.put_index_template( - name="books-fiction", - body={ - "index_patterns": ["books-fiction-*"], - "priority": 2, - "template": { - "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + ) + + client.indices.put_index_template( + name="books-fiction", + body={ + "index_patterns": ["books-fiction-*"], + "priority": 2, + "template": { + "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + }, }, - }, -) - -# # Test multiple index templates -client.indices.create(index="books-fiction-romance") -print(client.indices.get(index="books-fiction-romance")) - - -# Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns: -client.cluster.put_component_template( - name="books_mappings", - body={ - "template": { - "mappings": { - "properties": { - "title": {"type": "text"}, - "author": {"type": "text"}, - "published_on": {"type": "date"}, - "pages": {"type": "integer"}, + ) + + # # Test multiple index templates + client.indices.create(index="books-fiction-romance") + print(client.indices.get(index="books-fiction-romance")) + + # Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. 
+ # The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns: + client.cluster.put_component_template( + name="books_mappings", + body={ + "template": { + "mappings": { + "properties": { + "title": {"type": "text"}, + "author": {"type": "text"}, + "published_on": {"type": "date"}, + "pages": {"type": "integer"}, + } } } - } - }, -) - -client.indices.put_index_template( - name="books", - body={ - "index_patterns": ["books-*"], - "composed_of": ["books_mappings"], - "priority": 4, - "template": { - "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} }, - }, -) - -client.indices.put_index_template( - name="books-fiction", - body={ - "index_patterns": ["books-fiction-*"], - "composed_of": ["books_mappings"], - "priority": 5, - "template": { - "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + ) + + client.indices.put_index_template( + name="books", + body={ + "index_patterns": ["books-*"], + "composed_of": ["books_mappings"], + "priority": 4, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + }, + }, + ) + + client.indices.put_index_template( + name="books-fiction", + body={ + "index_patterns": ["books-fiction-*"], + "composed_of": ["books_mappings"], + "priority": 5, + "template": { + "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + }, }, - }, -) + ) + + # Test composable index templates + client.indices.create(index="books-fiction-horror") + print(client.indices.get(index="books-fiction-horror")) + # Get an index template + print(client.indices.get_index_template(name="books")) -# Test composable index templates -client.indices.create(index="books-fiction-horror") -print(client.indices.get(index="books-fiction-horror")) + # Delete an index template + client.indices.delete_index_template(name="books") -# Get an index template -print(client.indices.get_index_template(name="books")) + # Cleanup + client.indices.delete(index="books-*") + client.indices.delete_index_template(name="books-fiction") + client.cluster.delete_component_template(name="books_mappings") -# Delete an index template -client.indices.delete_index_template(name="books") -# Cleanup -client.indices.delete(index="books-*") -client.indices.delete_index_template(name="books-fiction") -client.cluster.delete_component_template(name="books_mappings") +if __name__ == "__main__": + main() diff --git a/samples/json/json-hello.py b/samples/json/json-hello.py deleted file mode 100755 index 5b39e41b..00000000 --- a/samples/json/json-hello.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -from opensearchpy import OpenSearch - -# connect to OpenSearch - -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
- -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -info = client.http.get("/") -print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") - -# create an index - -index_name = "movies" - -index_body = {"settings": {"index": {"number_of_shards": 4}}} - -print(client.http.put(f"/{index_name}", body=index_body)) - -# add a document to the index - -document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} - -id = "1" - -print(client.http.put(f"/{index_name}/_doc/{id}?refresh=true", body=document)) - -# search for a document - -q = "miller" - -query = { - "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, -} - -print(client.http.post(f"/{index_name}/_search", body=query)) - -# delete the document - -print(client.http.delete(f"/{index_name}/_doc/{id}")) - -# delete the index - -print(client.http.delete(f"/{index_name}")) diff --git a/samples/json/json_hello.py b/samples/json/json_hello.py new file mode 100755 index 00000000..4b2e10e2 --- /dev/null +++ b/samples/json/json_hello.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +from opensearchpy import OpenSearch + + +def main() -> None: + # connect to OpenSearch + + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + info = client.http.get("/") + print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + + # create an index + + index_name = "movies" + + index_body = {"settings": {"index": {"number_of_shards": 4}}} + + print(client.http.put(f"/{index_name}", body=index_body)) + + # add a document to the index + + document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} + + id = "1" + + print(client.http.put(f"/{index_name}/_doc/{id}?refresh=true", body=document)) + + # search for a document + + q = "miller" + + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + } + + print(client.http.post(f"/{index_name}/_search", body=query)) + + # delete the document + + print(client.http.delete(f"/{index_name}/_doc/{id}")) + + # delete the index + + print(client.http.delete(f"/{index_name}")) + + +if __name__ == "__main__": + main() diff --git a/samples/json/json-hello-async.py b/samples/json/json_hello_async.py similarity index 100% rename from samples/json/json-hello-async.py rename to samples/json/json_hello_async.py diff --git a/samples/knn/knn-basics.py b/samples/knn/knn-basics.py deleted file mode 100755 index 96efb028..00000000 --- a/samples/knn/knn-basics.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- - -import os -import random - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" -dimensions = 5 - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "settings": {"index.knn": True}, - "mappings": { - "properties": { - "values": {"type": "knn_vector", "dimension": dimensions}, - } - }, - }, - ) - -# index data -vectors = [] -for i in range(10): - vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append( - { - "_index": index_name, - "_id": i, - "values": vec, - } - ) - -# bulk index -helpers.bulk(client, vectors) - -client.indices.refresh(index=index_name) - -# search -vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) -print(f"Searching for {vec} ...") - -search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} -results = client.search(index=index_name, body=search_query) -for hit in results["hits"]["hits"]: - print(hit) - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/knn/knn-boolean-filter.py b/samples/knn/knn-boolean-filter.py deleted file mode 100755 index 5ae7704c..00000000 --- a/samples/knn/knn-boolean-filter.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- - -import os -import random - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" -dimensions = 5 - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "settings": {"index.knn": True}, - "mappings": { - "properties": { - "values": {"type": "knn_vector", "dimension": dimensions}, - } - }, - }, - ) - -# index data -vectors = [] -genres = ["fiction", "drama", "romance"] -for i in range(3000): - vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append( - { - "_index": index_name, - "_id": i, - "values": vec, - "metadata": {"genre": random.choice(genres)}, - } - ) - -# bulk index -helpers.bulk(client, vectors) - -client.indices.refresh(index=index_name) - -# search -genre = random.choice(genres) -vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) -print(f"Searching for {vec} with the '{genre}' genre ...") - -search_query = { - "query": { - "bool": { - "filter": {"bool": {"must": [{"term": {"metadata.genre": genre}}]}}, - "must": {"knn": {"values": {"vector": vec, "k": 5}}}, - } - } -} -results = client.search(index=index_name, body=search_query) -for hit in results["hits"]["hits"]: - print(hit) - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/knn/knn-efficient-filter.py b/samples/knn/knn-efficient-filter.py deleted file mode 100755 index cbfd41ad..00000000 --- a/samples/knn/knn-efficient-filter.py +++ /dev/null @@ -1,180 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- - -import os - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "hotels-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "settings": { - "index.knn": True, - "knn.algo_param.ef_search": 100, - "number_of_shards": 1, - "number_of_replicas": 0, - }, - "mappings": { - "properties": { - "location": { - "type": "knn_vector", - "dimension": 2, - "method": { - "name": "hnsw", - "space_type": "l2", - "engine": "lucene", - "parameters": {"ef_construction": 100, "m": 16}, - }, - }, - } - }, - }, - ) - -# index data -vectors = [ - { - "_index": "hotels-index", - "_id": "1", - "location": [5.2, 4.4], - "parking": "true", - "rating": 5, - }, - { - "_index": "hotels-index", - "_id": "2", - "location": [5.2, 3.9], - "parking": "false", - "rating": 4, - }, - { - "_index": "hotels-index", - "_id": "3", - "location": [4.9, 3.4], - "parking": "true", - "rating": 9, - }, - { - "_index": "hotels-index", - "_id": "4", - "location": [4.2, 4.6], - "parking": "false", - "rating": 6, - }, - { - "_index": "hotels-index", - "_id": "5", - "location": [3.3, 4.5], - "parking": "true", - "rating": 8, - }, - { - "_index": "hotels-index", - "_id": "6", - "location": [6.4, 3.4], - "parking": "true", - "rating": 9, - }, - { - "_index": "hotels-index", - "_id": "7", - "location": [4.2, 6.2], - "parking": "true", - "rating": 5, - }, - { - "_index": "hotels-index", - "_id": "8", - "location": [2.4, 4.0], - "parking": "true", - "rating": 8, - }, - { - "_index": "hotels-index", - "_id": "9", - "location": [1.4, 3.2], - "parking": "false", - "rating": 5, - }, - { - "_index": "hotels-index", - "_id": "10", - "location": [7.0, 9.9], - "parking": "true", - "rating": 9, - }, - { - "_index": "hotels-index", - "_id": "11", - "location": [3.0, 2.3], - "parking": "false", - "rating": 6, - }, - { - "_index": "hotels-index", - "_id": "12", - "location": [5.0, 1.0], - "parking": "true", - "rating": 3, - }, -] - -helpers.bulk(client, vectors) - -client.indices.refresh(index=index_name) - -# search -search_query = { - "size": 3, - "query": { - "knn": { - "location": { - "vector": [5, 4], - "k": 3, - "filter": { - "bool": { - "must": [ - {"range": {"rating": {"gte": 8, "lte": 10}}}, - {"term": {"parking": "true"}}, - ] - } - }, - } - } - }, -} - -results = client.search(index=index_name, body=search_query) -for hit in results["hits"]["hits"]: - print(hit) - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/knn/knn-async-basics.py b/samples/knn/knn_async_basics.py similarity index 100% rename from samples/knn/knn-async-basics.py rename to samples/knn/knn_async_basics.py diff --git a/samples/knn/knn_basics.py b/samples/knn/knn_basics.py new file mode 100755 index 00000000..c74344b2 --- /dev/null +++ b/samples/knn/knn_basics.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ + +import os +import random + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + dimensions = 5 + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "settings": {"index.knn": True}, + "mappings": { + "properties": { + "values": {"type": "knn_vector", "dimension": dimensions}, + } + }, + }, + ) + + # index data + vectors = [] + for i in range(10): + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + } + ) + + # bulk index + helpers.bulk(client, vectors) + + client.indices.refresh(index=index_name) + + # search + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + print(f"Searching for {vec} ...") + + search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} + results = client.search(index=index_name, body=search_query) + for hit in results["hits"]["hits"]: + print(hit) + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/knn/knn_boolean_filter.py b/samples/knn/knn_boolean_filter.py new file mode 100755 index 00000000..710216f2 --- /dev/null +++ b/samples/knn/knn_boolean_filter.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ + +import os +import random + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + dimensions = 5 + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "settings": {"index.knn": True}, + "mappings": { + "properties": { + "values": {"type": "knn_vector", "dimension": dimensions}, + } + }, + }, + ) + + # index data + vectors = [] + genres = ["fiction", "drama", "romance"] + for i in range(3000): + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + "metadata": {"genre": random.choice(genres)}, + } + ) + + # bulk index + helpers.bulk(client, vectors) + + client.indices.refresh(index=index_name) + + # search + genre = random.choice(genres) + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + print(f"Searching for {vec} with the '{genre}' genre ...") + + search_query = { + "query": { + "bool": { + "filter": {"bool": {"must": [{"term": {"metadata.genre": genre}}]}}, + "must": {"knn": {"values": {"vector": vec, "k": 5}}}, + } + } + } + results = client.search(index=index_name, body=search_query) + for hit in results["hits"]["hits"]: + print(hit) + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/knn/knn_efficient_filter.py b/samples/knn/knn_efficient_filter.py new file mode 100755 index 00000000..dfe1308f --- /dev/null +++ b/samples/knn/knn_efficient_filter.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
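The two filtered k-NN samples answer the same kind of question with different query shapes: `knn_boolean_filter.py` wraps the `knn` clause in a `bool` query, so the filter is applied alongside the vector search, while `knn_efficient_filter.py` embeds the filter inside the `knn` clause itself, letting engines that support it (such as the `lucene` engine configured in the mapping below) filter during the vector search rather than after it. A side-by-side sketch of the two shapes, with field names and values drawn from the samples:

```python
# knn_boolean_filter.py: the filter sits beside the knn clause in a bool query
boolean_query = {
    "query": {
        "bool": {
            "filter": {"bool": {"must": [{"term": {"metadata.genre": "drama"}}]}},
            "must": {"knn": {"values": {"vector": [0.1, 0.2, 0.3, 0.4, 0.5], "k": 5}}},
        }
    }
}

# knn_efficient_filter.py: the filter is embedded in the knn clause itself
efficient_query = {
    "query": {
        "knn": {
            "location": {
                "vector": [5, 4],
                "k": 3,
                "filter": {"term": {"parking": "true"}},
            }
        }
    }
}
```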
+ + +import os + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "hotels-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "settings": { + "index.knn": True, + "knn.algo_param.ef_search": 100, + "number_of_shards": 1, + "number_of_replicas": 0, + }, + "mappings": { + "properties": { + "location": { + "type": "knn_vector", + "dimension": 2, + "method": { + "name": "hnsw", + "space_type": "l2", + "engine": "lucene", + "parameters": {"ef_construction": 100, "m": 16}, + }, + }, + } + }, + }, + ) + + # index data + vectors = [ + { + "_index": "hotels-index", + "_id": "1", + "location": [5.2, 4.4], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "2", + "location": [5.2, 3.9], + "parking": "false", + "rating": 4, + }, + { + "_index": "hotels-index", + "_id": "3", + "location": [4.9, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "4", + "location": [4.2, 4.6], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "5", + "location": [3.3, 4.5], + "parking": "true", + "rating": 8, + }, + { + "_index": "hotels-index", + "_id": "6", + "location": [6.4, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "7", + "location": [4.2, 6.2], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "8", + "location": [2.4, 4.0], + "parking": "true", + "rating": 8, + }, + { + "_index": "hotels-index", + "_id": "9", + "location": [1.4, 3.2], + "parking": "false", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "10", + "location": [7.0, 9.9], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "11", + "location": [3.0, 2.3], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "12", + "location": [5.0, 1.0], + "parking": "true", + "rating": 3, + }, + ] + + helpers.bulk(client, vectors) + + client.indices.refresh(index=index_name) + + # search + search_query = { + "size": 3, + "query": { + "knn": { + "location": { + "vector": [5, 4], + "k": 3, + "filter": { + "bool": { + "must": [ + {"range": {"rating": {"gte": 8, "lte": 10}}}, + {"term": {"parking": "true"}}, + ] + } + }, + } + } + }, + } + + results = client.search(index=index_name, body=search_query) + for hit in results["hits"]["hits"]: + print(hit) + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/security/roles.py b/samples/security/roles.py index 8a2d1ef5..37558042 100644 --- a/samples/security/roles.py +++ b/samples/security/roles.py @@ -15,43 +15,49 @@ from opensearchpy import OpenSearch -# connect to OpenSearch - -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
- -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# Create a Role - -role_name = "test-role" - -role_content = { - "cluster_permissions": ["cluster_monitor"], - "index_permissions": [ - { - "index_patterns": ["index", "test-*"], - "allowed_actions": [ - "data_access", - "indices_monitor", - ], - } - ], -} - -response = client.security.create_role(role_name, body=role_content) -print(response) - -# Get a Role - -role_name = "test-role" - -response = client.security.get_role(role_name) -print(response) + +def main() -> None: + # connect to OpenSearch + + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # Create a Role + + role_name = "test-role" + + role_content = { + "cluster_permissions": ["cluster_monitor"], + "index_permissions": [ + { + "index_patterns": ["index", "test-*"], + "allowed_actions": [ + "data_access", + "indices_monitor", + ], + } + ], + } + + response = client.security.create_role(role_name, body=role_content) + print(response) + + # Get a Role + + role_name = "test-role" + + response = client.security.get_role(role_name) + print(response) + + +if __name__ == "__main__": + main() diff --git a/samples/security/users.py b/samples/security/users.py index 0a778b8d..3e1e90f5 100644 --- a/samples/security/users.py +++ b/samples/security/users.py @@ -15,31 +15,37 @@ from opensearchpy import OpenSearch -# connect to OpenSearch -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. +def main() -> None: + # connect to OpenSearch -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
-# Create a User + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) -user_name = "test-user" -user_content = {"password": "opensearch@123", "opendistro_security_roles": []} + # Create a User -response = client.security.create_user(user_name, body=user_content) -print(response) + user_name = "test-user" + user_content = {"password": "opensearch@123", "opendistro_security_roles": []} -# Get a User + response = client.security.create_user(user_name, body=user_content) + print(response) -user_name = "test-user" + # Get a User -response = client.security.get_user(user_name) -print(response) + user_name = "test-user" + + response = client.security.get_user(user_name) + print(response) + + +if __name__ == "__main__": + main() diff --git a/setup.cfg b/setup.cfg index 1437bc0e..3d5fd003 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,13 +6,26 @@ requires = python python-urllib3 [flake8] ignore = E203, E266, E501, W503 +max-line-length = 240 [tool:pytest] junit_family=legacy asyncio_mode=auto -[tool:isort] +[isort] profile=black +[black] +max-line-length = 240 +target-version = 'py33' + [mypy] ignore_missing_imports=True + +[pylint] +max-line-length = 240 +good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names + +[pylint.MESSAGE CONTROL] +disable = all +enable = line-too-long, invalid-name diff --git a/setup.py b/setup.py index 6a0a5d13..6ad7254b 100644 --- a/setup.py +++ b/setup.py @@ -31,26 +31,26 @@ from setuptools import find_packages, setup -package_name = "opensearch-py" -package_version = "" -base_dir = abspath(dirname(__file__)) +PACKAGE_NAME = "opensearch-py" +PACKAGE_VERSION = "" +BASE_DIR = abspath(dirname(__file__)) -with open(join(base_dir, package_name.replace("-", ""), "_version.py")) as f: +with open(join(BASE_DIR, PACKAGE_NAME.replace("-", ""), "_version.py")) as f: data = f.read() m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) if m: - package_version = m.group(1) + PACKAGE_VERSION = m.group(1) else: raise Exception(f"Invalid version: {data}") -with open(join(base_dir, "README.md")) as f: +with open(join(BASE_DIR, "README.md")) as f: long_description = f.read().strip() -module_dir = package_name.replace("-", "") +MODULE_DIR = PACKAGE_NAME.replace("-", "") packages = [ package for package in find_packages(where=".", exclude=("test_opensearchpy*",)) - if package == module_dir or package.startswith(module_dir + ".") + if package == MODULE_DIR or package.startswith(MODULE_DIR + ".") ] install_requires = [ "urllib3>=1.26.18", @@ -77,13 +77,13 @@ generate_require = ["black", "jinja2"] setup( - name=package_name, + name=PACKAGE_NAME, description="Python client for OpenSearch", license="Apache-2.0", url="https://github.com/opensearch-project/opensearch-py", long_description=long_description, long_description_content_type="text/markdown", - version=package_version, + version=PACKAGE_VERSION, author="Aleksei Atavin, Denis Zalevskiy, Rushi Agrawal, Shephali Mittal", author_email="axeo@aiven.io, dez@aiven.io, rushi.agr@gmail.com, shephalm@amazon.com", maintainer="Aleksei Atavin, Denis Zalevskiy, Rushi Agrawal, Shephali Mittal", diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 743add7b..c9c0dc17 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -45,7 +45,7 @@ from opensearchpy.compat import reraise_exceptions from 
opensearchpy.connection import Connection, async_connections from opensearchpy.exceptions import ConnectionError, NotFoundError, TransportError -from test_opensearchpy.TestHttpServer import TestHTTPServer +from test_opensearchpy.test_http_server import TestHTTPServer pytestmark: MarkDecorator = pytest.mark.asyncio diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index d6ef0128..ff88fd6d 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -127,23 +127,23 @@ class Index: async def test_range_serializes_properly() -> None: - class D(document.AsyncDocument): + class DocumentD(document.AsyncDocument): lr: Any = field.LongRange() - d = D(lr=Range(lt=42)) + d = DocumentD(lr=Range(lt=42)) assert 40 in d.lr assert 47 not in d.lr assert {"lr": {"lt": 42}} == d.to_dict() - d = D(lr={"lt": 42}) + d = DocumentD(lr={"lt": 42}) assert {"lr": {"lt": 42}} == d.to_dict() async def test_range_deserializes_properly() -> None: - class D(InnerDoc): + class DocumentD(InnerDoc): lr = field.LongRange() - d = D.from_opensearch({"lr": {"lt": 42}}, True) + d = DocumentD.from_opensearch({"lr": {"lt": 42}}, True) assert isinstance(d.lr, Range) assert 40 in d.lr assert 47 not in d.lr @@ -156,15 +156,15 @@ async def test_resolve_nested() -> None: async def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: - class A(document.AsyncDocument): + class DocumentA(document.AsyncDocument): name = field.Text() - class B(document.AsyncDocument): + class DocumentB(document.AsyncDocument): name = field.Keyword() i = AsyncIndex("i") - i.document(A) - i.document(B) + i.document(DocumentA) + i.document(DocumentB) with raises(ValueError): i.to_dict() @@ -182,11 +182,11 @@ async def test_matches_uses_index() -> None: async def test_matches_with_no_name_always_matches() -> None: - class D(document.AsyncDocument): + class DocumentD(document.AsyncDocument): pass - assert D._matches({}) - assert D._matches({"_index": "whatever"}) + assert DocumentD._matches({}) + assert DocumentD._matches({"_index": "whatever"}) async def test_matches_accepts_wildcards() -> None: @@ -521,10 +521,10 @@ async def test_document_inheritance() -> None: async def test_child_class_can_override_parent() -> None: - class A(document.AsyncDocument): + class DocumentA(document.AsyncDocument): o = field.Object(dynamic=False, properties={"a": field.Text()}) - class B(A): + class DocumentB(DocumentA): o = field.Object(dynamic="strict", properties={"b": field.Text()}) assert { @@ -535,7 +535,7 @@ class B(A): "type": "object", } } - } == B._doc_type.mapping.to_dict() + } == DocumentB._doc_type.mapping.to_dict() async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index 3541fdec..90cbf2f4 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -35,13 +35,13 @@ class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): # type: ignore - async def asyncSetUp(self) -> None: + async def asyncSetUp(self) -> None: # pylint: disable=invalid-name self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self) -> None: + async def asyncTearDown(self) -> None: # pylint: disable=invalid-name 
wipe_cluster(self.client) if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 6751ed29..e5638a67 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -41,13 +41,13 @@ class TestSecurityPlugin(IsolatedAsyncioTestCase): # type: ignore USER_NAME = "test-user" USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []} - async def asyncSetUp(self) -> None: + async def asyncSetUp(self) -> None: # pylint: disable=invalid-name self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self) -> None: + async def asyncTearDown(self) -> None: # pylint: disable=invalid-name if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index b494f83f..179a573c 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -95,7 +95,7 @@ async def close(self) -> None: } }""" -CLUSTER_NODES_7x_PUBLISH_HOST = """{ +CLUSTER_NODES_7X_PUBLISH_HOST = """{ "_nodes" : { "total" : 1, "successful" : 1, @@ -270,7 +270,7 @@ async def test_add_connection(self) -> None: assert 2 == len(t.connection_pool.connections) assert "http://google.com:1234" == t.connection_pool.connections[1].host - async def test_request_will_fail_after_X_retries(self) -> None: + async def test_request_will_fail_after_x_retries(self) -> None: t: Any = AsyncTransport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -453,7 +453,7 @@ async def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
t: Any = AsyncTransport( - [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], + [{"data": CLUSTER_NODES_7X_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, ) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index 62adf39f..f175990d 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -43,7 +43,7 @@ RequestError, TransportError, ) -from test_opensearchpy.TestHttpServer import TestHTTPServer +from test_opensearchpy.test_http_server import TestHTTPServer from ..test_cases import TestCase diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 8a23c218..97ae368a 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -59,14 +59,14 @@ def test_meta_from_dict() -> None: assert aggs.A(a.to_dict()) == a -def test_A_creates_proper_agg() -> None: +def test_aggs_creates_proper_agg() -> None: a = aggs.A("terms", field="tags") assert isinstance(a, aggs.Terms) assert a._params == {"field": "tags"} -def test_A_handles_nested_aggs_properly() -> None: +def test_aggs_handles_nested_aggs_properly() -> None: max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -74,12 +74,12 @@ def test_A_handles_nested_aggs_properly() -> None: assert a._params == {"field": "tags", "aggs": {"max_score": max_score}} -def test_A_passes_aggs_through() -> None: +def test_aggs_passes_aggs_through() -> None: a = aggs.A("terms", field="tags") assert aggs.A(a) is a -def test_A_from_dict() -> None: +def test_aggs_from_dict() -> None: d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -95,7 +95,7 @@ def test_A_from_dict() -> None: assert a.aggs.per_author == aggs.A("terms", field="author.raw") -def test_A_fails_with_incorrect_dict() -> None: +def test_aggs_fails_with_incorrect_dict() -> None: correct_d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -115,7 +115,7 @@ def test_A_fails_with_incorrect_dict() -> None: aggs.A(d) -def test_A_fails_with_agg_and_params() -> None: +def test_aggs_fails_with_agg_and_params() -> None: a = aggs.A("terms", field="tags") with raises(Exception): diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index 1a156ad8..bc6707a1 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -137,23 +137,23 @@ class Index: def test_range_serializes_properly() -> None: - class D(document.Document): + class DocumentD(document.Document): lr = field.LongRange() - d: Any = D(lr=Range(lt=42)) + d: Any = DocumentD(lr=Range(lt=42)) assert 40 in d.lr assert 47 not in d.lr assert {"lr": {"lt": 42}} == d.to_dict() - d = D(lr={"lt": 42}) + d = DocumentD(lr={"lt": 42}) assert {"lr": {"lt": 42}} == d.to_dict() def test_range_deserializes_properly() -> None: - class D(document.InnerDoc): + class DocumentD(document.InnerDoc): lr = field.LongRange() - d: Any = D.from_opensearch({"lr": {"lt": 42}}, True) + d: Any = DocumentD.from_opensearch({"lr": {"lt": 42}}, True) assert isinstance(d.lr, Range) assert 40 in d.lr assert 47 not in d.lr @@ -166,15 +166,15 @@ def test_resolve_nested() -> None: def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: - 
class A(document.Document): + class DocumentA(document.Document): name = field.Text() - class B(document.Document): + class DocumentB(document.Document): name = field.Keyword() i: Any = Index("i") - i.document(A) - i.document(B) + i.document(DocumentA) + i.document(DocumentB) with raises(ValueError): i.to_dict() @@ -192,11 +192,11 @@ def test_matches_uses_index() -> None: def test_matches_with_no_name_always_matches() -> None: - class D(document.Document): + class DocumentD(document.Document): pass - assert D._matches({}) - assert D._matches({"_index": "whatever"}) + assert DocumentD._matches({}) + assert DocumentD._matches({"_index": "whatever"}) def test_matches_accepts_wildcards() -> None: @@ -531,10 +531,10 @@ def test_document_inheritance() -> None: def test_child_class_can_override_parent() -> None: - class A(document.Document): + class DocumentA(document.Document): o = field.Object(dynamic=False, properties={"a": field.Text()}) - class B(A): + class DocumentB(DocumentA): o = field.Object(dynamic="strict", properties={"b": field.Text()}) assert { @@ -545,7 +545,7 @@ class B(A): "type": "object", } } - } == B._doc_type.mapping.to_dict() + } == DocumentB._doc_type.mapping.to_dict() def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 27790748..dbda0b91 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -32,7 +32,7 @@ from opensearchpy.helpers import function, query -def test_empty_Q_is_match_all() -> None: +def test_empty_query_is_match_all() -> None: q = query.Q() assert isinstance(q, query.MatchAll) @@ -389,57 +389,57 @@ class MyQuery(query.Query): assert query.Query._classes["my_query"] is MyQuery -def test_Q_passes_query_through() -> None: +def test_query_passes_query_through() -> None: q = query.Match(f="value1") assert query.Q(q) is q -def test_Q_constructs_query_by_name() -> None: +def test_query_constructs_query_by_name() -> None: q = query.Q("match", f="value") assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def test_Q_translates_double_underscore_to_dots_in_param_names() -> None: +def test_query_translates_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza") assert {"comment.author": "honza"} == q._params -def test_Q_doesn_translate_double_underscore_to_dots_in_param_names() -> None: +def test_query_doesn_translate_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza", _expand__to_dot=False) assert {"comment__author": "honza"} == q._params -def test_Q_constructs_simple_query_from_dict() -> None: +def test_query_constructs_simple_query_from_dict() -> None: q = query.Q({"match": {"f": "value"}}) assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def test_Q_constructs_compound_query_from_dict() -> None: +def test_query_constructs_compound_query_from_dict() -> None: q = query.Q({"bool": {"must": [{"match": {"f": "value"}}]}}) assert q == query.Bool(must=[query.Match(f="value")]) -def test_Q_raises_error_when_passed_in_dict_and_params() -> None: +def test_query_raises_error_when_passed_in_dict_and_params() -> None: with raises(Exception): query.Q({"match": {"f": "value"}}, f="value") -def test_Q_raises_error_when_passed_in_query_and_params() -> None: +def test_query_raises_error_when_passed_in_query_and_params() -> None: q = query.Match(f="value1") with 
raises(Exception): query.Q(q, f="value") -def test_Q_raises_error_on_unknown_query() -> None: +def test_query_raises_error_on_unknown_query() -> None: with raises(Exception): query.Q("not a query", f="value") diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index b6949833..d6139826 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -91,7 +91,7 @@ def test_attrlist_items_get_wrapped_during_iteration() -> None: assert isinstance(ls[3], utils.AttrDict) -def test_serializer_deals_with_Attr_versions() -> None: +def test_serializer_deals_with_attr_versions() -> None: d = utils.AttrDict({"key": utils.AttrList([1, 2, 3])}) assert serializer.serializer.dumps(d) == serializer.serializer.dumps( diff --git a/test_opensearchpy/TestHttpServer.py b/test_opensearchpy/test_http_server.py similarity index 82% rename from test_opensearchpy/TestHttpServer.py rename to test_opensearchpy/test_http_server.py index 3d8b31fb..844696ef 100644 --- a/test_opensearchpy/TestHttpServer.py +++ b/test_opensearchpy/test_http_server.py @@ -15,7 +15,9 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler): - def do_GET(self) -> None: + __test__ = False + + def do_GET(self) -> None: # pylint: disable=invalid-name headers = self.headers if self.path == "/redirect": @@ -28,14 +30,14 @@ def do_GET(self) -> None: self.end_headers() - Headers = {} + capitalized_headers = {} for header, value in headers.items(): capitalized_header = "-".join([word.title() for word in header.split("-")]) - Headers.update({capitalized_header: value}) - if "Connection" in Headers: - Headers.pop("Connection") + capitalized_headers.update({capitalized_header: value}) + if "Connection" in capitalized_headers: + capitalized_headers.pop("Connection") - data = {"method": "GET", "headers": Headers} + data = {"method": "GET", "headers": capitalized_headers} self.wfile.write(json.dumps(data).encode("utf-8")) diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index 3249f41b..f5c6d8c7 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -469,68 +469,75 @@ def sync_runner(sync_client: Any) -> Any: YAML_TEST_SPECS = [] -# Try loading the REST API test specs from the Elastic Artifacts API -try: - # Construct the HTTP and OpenSearch client - http = urllib3.PoolManager(retries=10) - client = get_client() - - package_url = "https://github.com/opensearch-project/OpenSearch/archive/main.zip" - - # Download the zip and start reading YAML from the files in memory - package_zip = zipfile.ZipFile(io.BytesIO(http.request("GET", package_url).data)) - for yaml_file in package_zip.namelist(): - if not re.match( - r"^OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/.*\.ya?ml$", - yaml_file, - ): - continue - yaml_tests = list(yaml.safe_load_all(package_zip.read(yaml_file))) - - # Each file may have a "test" named 'setup' or 'teardown', - # these sets of steps should be run at the beginning and end - # of every other test within the file so we do one pass to capture those. 
- setup_steps = teardown_steps = None - test_numbers_and_steps = [] - test_number = 0 - - for yaml_test in yaml_tests: - test_name, test_step = yaml_test.popitem() - if test_name == "setup": - setup_steps = test_step - elif test_name == "teardown": - teardown_steps = test_step - else: - test_numbers_and_steps.append((test_number, test_step)) - test_number += 1 - - # Now we combine setup, teardown, and test_steps into - # a set of pytest.param() instances - for test_number, test_step in test_numbers_and_steps: - # Build the id from the name of the YAML file and - # the number within that file. Most important step - # is to remove most of the file path prefixes and - # the .yml suffix. - pytest_test_name = yaml_file.rpartition(".")[0].replace(".", "/") - for prefix in ("rest-api-spec/", "test/", "oss/"): - if pytest_test_name.startswith(prefix): - pytest_test_name = pytest_test_name[len(prefix) :] - pytest_param_id = "%s[%d]" % (pytest_test_name, test_number) - - pytest_param = { - "setup": setup_steps, - "run": test_step, - "teardown": teardown_steps, - } - # Skip either 'test_name' or 'test_name[x]' - if pytest_test_name in SKIP_TESTS or pytest_param_id in SKIP_TESTS: - pytest_param["skip"] = True - - YAML_TEST_SPECS.append(pytest.param(pytest_param, id=pytest_param_id)) - -except Exception as e: - warnings.warn("Could not load REST API tests: %s" % (str(e),)) +client = get_client() + + +def load_rest_api_tests() -> None: + # Try loading the REST API test specs from OpenSearch core. + try: + # Construct the HTTP and OpenSearch client + http = urllib3.PoolManager(retries=10) + package_url = ( + "https://github.com/opensearch-project/OpenSearch/archive/main.zip" + ) + + # Download the zip and start reading YAML from the files in memory + package_zip = zipfile.ZipFile(io.BytesIO(http.request("GET", package_url).data)) + for yaml_file in package_zip.namelist(): + if not re.match( + r"^OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/.*\.ya?ml$", + yaml_file, + ): + continue + yaml_tests = list(yaml.safe_load_all(package_zip.read(yaml_file))) + + # Each file may have a "test" named 'setup' or 'teardown', + # these sets of steps should be run at the beginning and end + # of every other test within the file so we do one pass to capture those. + setup_steps = teardown_steps = None + test_numbers_and_steps = [] + test_number = 0 + + for yaml_test in yaml_tests: + test_name, test_step = yaml_test.popitem() + if test_name == "setup": + setup_steps = test_step + elif test_name == "teardown": + teardown_steps = test_step + else: + test_numbers_and_steps.append((test_number, test_step)) + test_number += 1 + + # Now we combine setup, teardown, and test_steps into + # a set of pytest.param() instances + for test_number, test_step in test_numbers_and_steps: + # Build the id from the name of the YAML file and + # the number within that file. Most important step + # is to remove most of the file path prefixes and + # the .yml suffix. 
+ pytest_test_name = yaml_file.rpartition(".")[0].replace(".", "/") + for prefix in ("rest-api-spec/", "test/", "oss/"): + if pytest_test_name.startswith(prefix): + pytest_test_name = pytest_test_name[len(prefix) :] + pytest_param_id = "%s[%d]" % (pytest_test_name, test_number) + + pytest_param = { + "setup": setup_steps, + "run": test_step, + "teardown": teardown_steps, + } + # Skip either 'test_name' or 'test_name[x]' + if pytest_test_name in SKIP_TESTS or pytest_param_id in SKIP_TESTS: + pytest_param["skip"] = True + + YAML_TEST_SPECS.append(pytest.param(pytest_param, id=pytest_param_id)) + + except Exception as e: + warnings.warn("Could not load REST API tests: %s" % (str(e),)) + + +load_rest_api_tests() if not RUN_ASYNC_REST_API_TESTS: diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 4b37e3ac..e299e23f 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -82,7 +82,7 @@ def perform_request(self, *args: Any, **kwargs: Any) -> Any: } }""" -CLUSTER_NODES_7x_PUBLISH_HOST = """{ +CLUSTER_NODES_7X_PUBLISH_HOST = """{ "_nodes" : { "total" : 1, "successful" : 1, @@ -264,7 +264,7 @@ def test_add_connection(self) -> None: "http://google.com:1234", t.connection_pool.connections[1].host ) - def test_request_will_fail_after_X_retries(self) -> None: + def test_request_will_fail_after_x_retries(self) -> None: t: Any = Transport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -408,7 +408,7 @@ def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. t: Any = Transport( - [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], + [{"data": CLUSTER_NODES_7X_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, ) diff --git a/utils/build-dists.py b/utils/build_dists.py similarity index 89% rename from utils/build-dists.py rename to utils/build_dists.py index bca9c154..137542b4 100644 --- a/utils/build-dists.py +++ b/utils/build_dists.py @@ -40,25 +40,25 @@ import tempfile from typing import Any -base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -tmp_dir = None +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +TMP_DIR = None @contextlib.contextmanager # type: ignore def set_tmp_dir() -> None: - global tmp_dir - tmp_dir = tempfile.mkdtemp() - yield tmp_dir - shutil.rmtree(tmp_dir) - tmp_dir = None + global TMP_DIR + TMP_DIR = tempfile.mkdtemp() + yield TMP_DIR + shutil.rmtree(TMP_DIR) + TMP_DIR = None def run(*argv: Any, expect_exit_code: int = 0) -> None: - global tmp_dir - if tmp_dir is None: - os.chdir(base_dir) + global TMP_DIR + if TMP_DIR is None: + os.chdir(BASE_DIR) else: - os.chdir(tmp_dir) + os.chdir(TMP_DIR) cmd = " ".join(shlex.quote(x) for x in argv) print("$ " + cmd) @@ -132,7 +132,7 @@ def test_dist(dist: Any) -> None: "-m", "mypy", "--strict", - os.path.join(base_dir, "test_opensearchpy/test_types/async_types.py"), + os.path.join(BASE_DIR, "test_opensearchpy/test_types/async_types.py"), ) # Ensure that the namespaces are correct for the dist @@ -153,7 +153,7 @@ def test_dist(dist: Any) -> None: "-m", "mypy", "--strict", - os.path.join(base_dir, "test_opensearchpy/test_types/sync_types.py"), + os.path.join(BASE_DIR, "test_opensearchpy/test_types/sync_types.py"), ) else: run( @@ -161,7 +161,7 @@ def test_dist(dist: Any) -> None: "-m", "mypy", "--strict", - os.path.join(base_dir, 
"test_opensearchpy/test_types/aliased_types.py"), + os.path.join(BASE_DIR, "test_opensearchpy/test_types/aliased_types.py"), ) # Uninstall the dist, see that we can't import things anymore @@ -187,7 +187,7 @@ def main() -> None: run("python", "setup.py", "sdist", "bdist_wheel") # Grab the major version to be used as a suffix. - version_path = os.path.join(base_dir, "opensearchpy/_version.py") + version_path = os.path.join(BASE_DIR, "opensearchpy/_version.py") with open(version_path) as f: data = f.read() m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) @@ -249,12 +249,12 @@ def main() -> None: # Rename the module to fit the suffix. shutil.move( - os.path.join(base_dir, "opensearchpy"), - os.path.join(base_dir, "opensearchpy%s" % suffix), + os.path.join(BASE_DIR, "opensearchpy"), + os.path.join(BASE_DIR, "opensearchpy%s" % suffix), ) # Ensure that the version within 'opensearchpy/_version.py' is correct. - version_path = os.path.join(base_dir, f"opensearchpy{suffix}/_version.py") + version_path = os.path.join(BASE_DIR, f"opensearchpy{suffix}/_version.py") with open(version_path) as f: version_data = f.read() version_data = re.sub( @@ -267,16 +267,16 @@ def main() -> None: f.write(version_data) # Rewrite setup.py with the new name. - setup_py_path = os.path.join(base_dir, "setup.py") + setup_py_path = os.path.join(BASE_DIR, "setup.py") with open(setup_py_path) as f: setup_py = f.read() with open(setup_py_path, "w") as f: f.truncate() - assert 'package_name = "opensearch-py"' in setup_py + assert 'PACKAGE_NAME = "opensearch-py"' in setup_py f.write( setup_py.replace( - 'package_name = "opensearch-py"', - 'package_name = "opensearch-py%s"' % suffix, + 'PACKAGE_NAME = "opensearch-py"', + 'PACKAGE_NAME = "opensearch-py%s"' % suffix, ) ) @@ -289,10 +289,10 @@ def main() -> None: run("rm", "-rf", "opensearchpy%s/" % suffix) # Test everything that got created - dists = os.listdir(os.path.join(base_dir, "dist")) + dists = os.listdir(os.path.join(BASE_DIR, "dist")) assert len(dists) == 4 for dist in dists: - test_dist(os.path.join(base_dir, "dist", dist)) + test_dist(os.path.join(BASE_DIR, "dist", dist)) os.system("chmod a+w dist/*") # After this run 'python -m twine upload dist/*' diff --git a/utils/generate-api.py b/utils/generate_api.py similarity index 93% rename from utils/generate-api.py rename to utils/generate_api.py index 792446dd..d671a975 100644 --- a/utils/generate-api.py +++ b/utils/generate_api.py @@ -13,10 +13,10 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. # -# Licensed to Elasticsearch B.V. under one or more contributor +# Licensed to Elasticsearch b.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under +# ownership. Elasticsearch b.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -143,14 +143,15 @@ def dump(self) -> None: self.sort() # This code snippet adds headers to each generated module indicating that the code is generated. - header_separator = "# -----------------------------------------------------" - License_header_end_1 = "# GitHub history for details." - License_header_end_2 = "# under the License." + # The separator is the last line in the "THIS CODE IS AUTOMATICALLY GENERATED" header. 
+ header_separator = "# -----------------------------------------------------------------------------------------+" + license_header_end_1 = "# GitHub history for details." + license_header_end_2 = "# under the License." update_header = True - License_position = 0 + license_position = 0 - # Identifying the insertion point for the "THIS CODE IS GENERATED" header. + # Identifying the insertion point for the "THIS CODE IS AUTOMATICALLY GENERATED" header. if os.path.exists(self.filepath): with open(self.filepath, "r") as f: content = f.read() @@ -160,20 +161,20 @@ def dump(self) -> None: content.find(header_separator) + len(header_separator) + 2 ) header_position = content.rfind("\n", 0, header_end_position) + 1 - if License_header_end_1 in content: - if License_header_end_2 in content: + if license_header_end_1 in content: + if license_header_end_2 in content: position = ( - content.find(License_header_end_2) - + len(License_header_end_2) + content.find(license_header_end_2) + + len(license_header_end_2) + 2 ) else: position = ( - content.find(License_header_end_1) - + len(License_header_end_1) + content.find(license_header_end_1) + + len(license_header_end_1) + 2 ) - License_position = content.rfind("\n", 0, position) + 1 + license_position = content.rfind("\n", 0, position) + 1 current_script_folder = os.path.dirname(os.path.abspath(__file__)) generated_file_header_path = os.path.join( @@ -190,12 +191,12 @@ def dump(self) -> None: with open(self.filepath, "w") as f: if update_header is True: f.write( - self.header[:License_position] + self.header[:license_position] + "\n" + header_content + "\n\n" + "#replace_token#\n" - + self.header[License_position:] + + self.header[license_position:] ) else: f.write( @@ -470,21 +471,21 @@ def read_modules() -> Any: parts_new = {} for m in params: - A = dict(type=m["schema"]["type"], description=m["description"]) + a = dict(type=m["schema"]["type"], description=m["description"]) if "default" in m["schema"]: - A.update({"default": m["schema"]["default"]}) + a.update({"default": m["schema"]["default"]}) if "enum" in m["schema"]: - A.update({"type": "enum"}) - A.update({"options": m["schema"]["enum"]}) + a.update({"type": "enum"}) + a.update({"options": m["schema"]["enum"]}) if "deprecated" in m["schema"]: - A.update({"deprecated": m["schema"]["deprecated"]}) - A.update( + a.update({"deprecated": m["schema"]["deprecated"]}) + a.update( {"deprecation_message": m["schema"]["x-deprecation-message"]} ) - params_new.update({m["name"]: A}) + params_new.update({m["name"]: a}) # Removing the deprecated "type" if p["x-operation-group"] != "nodes.hot_threads" and "type" in params_new: @@ -502,17 +503,17 @@ def read_modules() -> Any: p.pop("parameters") for n in parts: - B = dict(type=n["schema"]["type"]) + b = dict(type=n["schema"]["type"]) if "description" in n: - B.update({"description": n["description"]}) + b.update({"description": n["description"]}) if "x-enum-options" in n["schema"]: - B.update({"options": n["schema"]["x-enum-options"]}) + b.update({"options": n["schema"]["x-enum-options"]}) deprecated_new = {} if "deprecated" in n: - B.update({"deprecated": n["deprecated"]}) + b.update({"deprecated": n["deprecated"]}) if "x-deprecation-version" in n: deprecated_new.update({"version": n["x-deprecation-version"]}) @@ -522,7 +523,7 @@ def read_modules() -> Any: {"description": n["x-deprecation-description"]} ) - parts_new.update({n["name"]: B}) + parts_new.update({n["name"]: b}) if bool(parts_new): p.update({"parts": parts_new}) diff --git 
a/utils/generated_file_headers.txt b/utils/generated_file_headers.txt index 135828ce..ff260c5a 100644 --- a/utils/generated_file_headers.txt +++ b/utils/generated_file_headers.txt @@ -1,8 +1,7 @@ -# ---------------------------------------------------- -# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# ------------------------------------------------------------------------------------------ +# THIS CODE IS AUTOMATICALLY GENERATED AND MANUAL EDITS WILL BE LOST # -# To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or the "OpenSearch API specification" available at: -# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- +# To contribute, kindly make modifications in the opensearch-py client generator +# or in the OpenSearch API specification, and run `nox -rs generate`. See DEVELOPER_GUIDE.md +# and https://github.com/opensearch-project/opensearch-api-specification for details. +# -----------------------------------------------------------------------------------------+ diff --git a/utils/license-headers.py b/utils/license_headers.py similarity index 90% rename from utils/license-headers.py rename to utils/license_headers.py index e0f31b59..903f176d 100644 --- a/utils/license-headers.py +++ b/utils/license_headers.py @@ -18,9 +18,9 @@ import sys from typing import Iterator, List -lines_to_keep = ["# -*- coding: utf-8 -*-", "#!/usr/bin/env python"] +LINES_TO_KEEP = ["# -*- coding: utf-8 -*-", "#!/usr/bin/env python"] -license_header = """ +LICENSE_HEADER = """ # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -54,14 +54,14 @@ def does_file_need_fix(filepath: str) -> bool: with open(filepath, mode="r") as f: for line in f: line = line.strip() - if len(line) == 0 or line in lines_to_keep: + if len(line) == 0 or line in LINES_TO_KEEP: pass elif line[0] == "#": existing_header += line existing_header += "\n" else: break - return not existing_header.startswith(license_header) + return not existing_header.startswith(LICENSE_HEADER) def add_header_to_file(filepath: str) -> None: @@ -69,9 +69,9 @@ def add_header_to_file(filepath: str) -> None: lines = list(f) i = 0 for i, line in enumerate(lines): - if len(line) > 0 and line not in lines_to_keep: + if len(line) > 0 and line not in LINES_TO_KEEP: break - lines = lines[:i] + [license_header] + lines[i:] + lines = lines[:i] + [LICENSE_HEADER] + lines[i:] with open(filepath, mode="w") as f: f.truncate() f.write("".join(lines)) From 09d4dba478880d52e58066dd8b99e5c67a2a2883 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Tue, 21 Nov 2023 15:55:24 -0500 Subject: [PATCH 66/80] Enabled pylint:pointless-statement. 
(#611) Signed-off-by: dblock Signed-off-by: roma2023 --- CHANGELOG.md | 3 ++- setup.cfg | 2 +- .../test_async/test_helpers/test_document.py | 6 +++--- test_opensearchpy/test_helpers/test_document.py | 6 +++--- test_opensearchpy/test_helpers/test_result.py | 8 ++++---- 5 files changed, 13 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 891157b9..010e4113 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,8 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] ### Added -- Added pylint, enforcing `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590)) +- Added pylint `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590)) +- Added pylint `pointless-statement` ([#611](https://github.com/opensearch-project/opensearch-py/pull/611)) ### Changed ### Deprecated ### Removed diff --git a/setup.cfg b/setup.cfg index 3d5fd003..301efb34 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,4 +28,4 @@ good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names [pylint.MESSAGE CONTROL] disable = all -enable = line-too-long, invalid-name +enable = line-too-long, invalid-name, pointless-statement diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index ff88fd6d..30822751 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -343,10 +343,10 @@ async def test_doc_type_can_be_correctly_pickled() -> None: async def test_meta_is_accessible_even_on_empty_doc() -> None: d = MyDoc() - d.meta + assert d.meta == {} d = MyDoc(title="aaa") - d.meta + assert d.meta == {} async def test_meta_field_mapping() -> None: @@ -404,7 +404,7 @@ def password(self, pwd: Any) -> None: assert u.check_password(b"not-secret") with raises(AttributeError): - u.password + assert u.password async def test_nested_can_be_assigned_to() -> None: diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index bc6707a1..89ee25ca 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -353,10 +353,10 @@ def test_doc_type_can_be_correctly_pickled() -> None: def test_meta_is_accessible_even_on_empty_doc() -> None: d1: Any = MyDoc() - d1.meta + assert d1.meta == {} d2: Any = MyDoc(title="aaa") - d2.meta + assert d2.meta == {} def test_meta_field_mapping() -> None: @@ -414,7 +414,7 @@ def password(self, pwd: Any) -> None: assert u.check_password(b"not-secret") with raises(AttributeError): - u.password + assert u.password def test_nested_can_be_assigned_to() -> None: diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index 296553f3..d3717d37 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -43,7 +43,7 @@ def agg_response(aggs_search: Any, aggs_data: Any) -> Any: def test_agg_response_is_pickleable(agg_response: Any) -> None: - agg_response.hits + assert agg_response.hits == [] r = pickle.loads(pickle.dumps(agg_response)) assert r == agg_response @@ -53,7 +53,7 @@ def test_agg_response_is_pickleable(agg_response: Any) -> None: def test_response_is_pickleable(dummy_response: Any) -> None: res = response.Response(Search(), dummy_response) - res.hits + assert res.hits r = 
pickle.loads(pickle.dumps(res))
 
     assert r == res
@@ -146,10 +146,10 @@ def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response: Any) -> No
     assert "Honza" == res.hits[2].name.first
 
     with raises(KeyError):
-        h["not_there"]
+        assert h["not_there"]
 
     with raises(AttributeError):
-        h.not_there
+        assert h.not_there
 
 
 def test_slicing_on_response_slices_on_hits(dummy_response: Any) -> None:

From 46025a2d637d1e6884c0bde8a5988886ef2864d2 Mon Sep 17 00:00:00 2001
From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com>
Date: Wed, 22 Nov 2023 06:14:12 -0800
Subject: [PATCH 67/80] Added a guide & sample for a custom logger client
 implementation. (#579)

* Added a guide & sample for a custom logger client implementation.

Signed-off-by: Djcarrillo6

Black formatter

Signed-off-by: Djcarrillo6

* Changes from PR review

Signed-off-by: Djcarrillo6

Fixed import formatting in sample code for guide.

Signed-off-by: Djcarrillo6

Fixed nox formatting of log collection sample module.

Signed-off-by: Djcarrillo6

Added types to log_collection_sample.py

Signed-off-by: Djcarrillo6

Added type ignore to StreamHandler class

Signed-off-by: Djcarrillo6

Added formatting change

Signed-off-by: Djcarrillo6

* Added PR review changes.

Signed-off-by: Djcarrillo6

Fixed typo in CHANGELOG.

Signed-off-by: Djcarrillo6

Requested changes.

Signed-off-by: Djcarrillo6

Requested changes again.

Signed-off-by: Djcarrillo6

Added link in USER_GUIDE.md.

Signed-off-by: Djcarrillo6

---------

Signed-off-by: Djcarrillo6
Signed-off-by: roma2023
---
 CHANGELOG.md                             |   1 +
 USER_GUIDE.md                            |   1 +
 guides/log_collection.md                 | 169 +++++++++++++++++++++++
 samples/logging/log_collection_sample.py | 116 ++++++++++++++++
 4 files changed, 287 insertions(+)
 create mode 100644 guides/log_collection.md
 create mode 100644 samples/logging/log_collection_sample.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 010e4113..4aa99496 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 ### Added
 - Added pylint `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590))
 - Added pylint `pointless-statement` ([#611](https://github.com/opensearch-project/opensearch-py/pull/611))
+- Added a log collection guide ([#579](https://github.com/opensearch-project/opensearch-py/pull/579))
 ### Changed
 ### Deprecated
 ### Removed
diff --git a/USER_GUIDE.md b/USER_GUIDE.md
index 753485e5..5f38b535 100644
--- a/USER_GUIDE.md
+++ b/USER_GUIDE.md
@@ -159,6 +159,7 @@ print(response)
 - [Making Raw JSON REST Requests](guides/json.md)
 - [Connection Classes](guides/connection_classes.md)
 - [Document Lifecycle](guides/document_lifecycle.md)
+- [Collecting Logs](guides/log_collection.md)
 
 ## Plugins
 
diff --git a/guides/log_collection.md b/guides/log_collection.md
new file mode 100644
index 00000000..ed07c4da
--- /dev/null
+++ b/guides/log_collection.md
@@ -0,0 +1,169 @@
+- [Log Collection Guide](#log-collection-guide)
+- [Import Required Modules](#import-required-modules)
+- [Download and Start OpenSearch](#download-and-start-opensearch)
+- [Setup Connection with OpenSearch](#setup-connection-with-opensearch)
+- [Initialize Logger](#initialize-logger)
+- [Custom Handler For Logs](#custom-handler-for-logs)
+- [Create OpenSearch Handler and Add to Logger](#create-opensearch-handler-and-add-to-logger)
+- [Setup Asynchronous Logging Using Queues](#setup-asynchronous-logging-using-queues)
+- [Clean Up](#clean-up)
+- [Sample Code](#sample-code)
+
+
+## Log Collection Guide
+In this guide, we will look at how to collect logs from your application and send them to OpenSearch.
+
+## Import Required Modules
+Let's import the required modules:
+
+```python
+import logging
+import queue
+from datetime import datetime
+from logging.handlers import QueueHandler, QueueListener
+from typing import Any
+
+import urllib3
+
+from opensearchpy import OpenSearch
+
+urllib3.disable_warnings()
+```
+
+## Download and Start OpenSearch
+```
+docker pull opensearchproject/opensearch:latest
+```
+
+```
+docker run -d -p 9200:9200 -p 9600:9600 --name opensearch_opensearch_1 -e "discovery.type=single-node" opensearchproject/opensearch:latest
+```
+
+## Setup Connection with OpenSearch
+
+Create a client instance:
+```python
+    opensearch_client: Any = OpenSearch(
+        "https://admin:admin@localhost:9200",
+        use_ssl=True,
+        verify_certs=False,
+        ssl_show_warn=False,
+        http_auth=("admin", "admin"),
+    )
+```
+
+## Initialize Logger
+Initialize a logger named "OpenSearchLogs" that emits logs to OpenSearch, along with a console handler; both are set to the INFO level. The console handler is then added to the logger. For every log line processed by this setup, a corresponding OpenSearch document is created. This approach supports structured and comprehensive logging because each document can include extensive metadata within it.
+
+```python
+    # Initialize a logger named "OpenSearchLogs" for OpenSearch & set log level to INFO
+    print("Initializing logger...")
+    os_logger = logging.getLogger("OpenSearchLogs")
+    os_logger.setLevel(logging.INFO)
+
+    # Create a console handler
+    console_handler = logging.StreamHandler()
+    console_handler.setLevel(logging.INFO)
+
+    # Add console handler to the logger
+    os_logger.addHandler(console_handler)
+```
+
+## Custom Handler For Logs
+Define a custom handler that logs to OpenSearch:
+
+```python
+class OpenSearchHandler(logging.Handler):
+    # Initializer / Instance attributes
+    def __init__(self, opensearch_client):
+        logging.Handler.__init__(self)
+        self.opensearch_client = opensearch_client
+
+    # Build index name (e.g., "logs-YYYY-MM-DD")
+    def _build_index_name(self):
+        return f"logs-{datetime.date(datetime.now())}"
+
+    # Emit logs to the OpenSearch cluster
+    def emit(self, record):
+        document = {
+            "timestamp": datetime.fromtimestamp(record.created).isoformat(),
+            "name": record.name,
+            "level": record.levelname,
+            "message": record.getMessage(),
+            "source": {
+                "file": record.pathname,
+                "line": record.lineno,
+                "function": record.funcName,
+            },
+            "process": {
+                "id": record.process,
+                "name": record.processName
+            },
+            "thread": {
+                "id": record.thread,
+                "name": record.threadName
+            },
+        }
+
+        # Write the log entry to OpenSearch, handle exceptions
+        self.opensearch_client.index(
+            index=self._build_index_name(),
+            body=document,
+        )
+```
+
+## Create OpenSearch Handler and Add to Logger
+Create an instance of OpenSearchHandler and add it to the logger:
+
+```python
+    print("Creating an instance of OpenSearchHandler and adding it to the logger...")
+    # Create an instance of OpenSearchHandler and add it to the logger
+    os_handler = OpenSearchHandler(opensearch_client)
+    os_logger.addHandler(os_handler)
+```
+
+## Setup Asynchronous Logging Using Queues
+Finally, let's set up asynchronous logging using Queues:
+
+```python
+    print("Setting up asynchronous logging using Queues...")
+    # Setup asynchronous logging using Queues
+    log_queue = queue.Queue(-1)  # no limit on size
+    os_queue_handler = QueueHandler(log_queue)
+    os_queue_listener = QueueListener(log_queue, os_handler)
+
+    # Add queue handler to the logger
+    os_logger.addHandler(os_queue_handler)
+
+    # Start listening on the queue using the os_queue_listener
+    os_queue_listener.start()
+```
+
+## Clean Up
+Finally, let's clean up by stopping the queue listener:
+
+```python
+    print("Cleaning up...")
+    # Stop listening on the queue
+    os_queue_listener.stop()
+    print("Log Collection Guide has completed running")
+```
+
+## Sample Code
+See [log_collection_sample.py](/samples/logging/log_collection_sample.py) for a working sample of the concepts in this guide. The script will create a logger named "OpenSearchLogs" and set the log level to INFO. It will then create an instance of OpenSearchHandler and add it to the logger. Finally, it will set up asynchronous logging using Queues and send a test log to the OpenSearch cluster.
+
+Expected Output From Running [log_collection_sample.py](/samples/logging/log_collection_sample.py):
+
+```
+ """
+ Running Log Collection Guide
+ Setting up connection with OpenSearch cluster...
+ Initializing logger...
+ Creating an instance of OpenSearchHandler and adding it to the logger...
+ Setting up asynchronous logging using Queues...
+ Logger is set up and listener has started. Sending a test log...
+ This is a test log message
+ Cleaning up...
+ Log Collection Guide has completed running
+ """
+```
\ No newline at end of file
diff --git a/samples/logging/log_collection_sample.py b/samples/logging/log_collection_sample.py
new file mode 100644
index 00000000..1e85b977
--- /dev/null
+++ b/samples/logging/log_collection_sample.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
+ +import logging +import queue +from datetime import datetime +from logging.handlers import QueueHandler, QueueListener +from typing import Any + +import urllib3 + +from opensearchpy import OpenSearch + +urllib3.disable_warnings() + + +def main() -> None: + print("Collecting logs.") + + # Create a console handler + console_handler: logging.StreamHandler = logging.StreamHandler() # type: ignore + console_handler.setLevel(logging.INFO) + + # Setup connection with the OpenSearch cluster + print("Setting up connection with OpenSearch cluster...") + opensearch_client: Any = OpenSearch( + "https://admin:admin@localhost:9200", + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + http_auth=("admin", "admin"), + ) + + # Initialize a logger named "OpenSearchLogs" for OpenSearch + print("Initializing logger...") + os_logger: logging.Logger = logging.getLogger("OpenSearchLogs") + os_logger.setLevel(logging.INFO) + + # Add console handler to the logger + os_logger.addHandler(console_handler) + + # Define a custom handler that logs to OpenSearch + class OpenSearchHandler(logging.Handler): + # Initializer / Instance attributes + def __init__(self, opensearch_client: Any) -> None: + super().__init__() + self.os_client = opensearch_client + + # Build index name (e.g., "logs-YYYY-MM-DD") + def _build_index_name(self) -> str: + return f"logs-{datetime.date(datetime.now())}" + + # Emit logs to the OpenSearch cluster + def emit(self, record: logging.LogRecord) -> None: + document = { + "timestamp": datetime.fromtimestamp(record.created).isoformat(), + "name": record.name, + "level": record.levelname, + "message": record.getMessage(), + "source": { + "file": record.pathname, + "line": record.lineno, + "function": record.funcName, + }, + "process": {"id": record.process, "name": record.processName}, + "thread": {"id": record.thread, "name": record.threadName}, + } + + try: + self.os_client.index( + index=self._build_index_name(), + body=document, + ) + except Exception as e: + print(f"Failed to send log to OpenSearch: {e}") + logging.warning(f"Failed to send log to OpenSearch: {e}") + raise + + print("Creating an instance of OpenSearchHandler and adding it to the logger...") + # Create an instance of OpenSearchHandler and add it to the logger + os_handler: OpenSearchHandler = OpenSearchHandler(opensearch_client) + os_logger.addHandler(os_handler) + + print("Setting up asynchronous logging using Queues...") + # Setup asynchronous logging using Queues + log_queue: queue.Queue[logging.LogRecord] = queue.Queue(-1) # no limit on size + os_queue_handler: logging.Handler = QueueHandler(log_queue) + os_queue_listener: QueueListener = QueueListener(log_queue, os_handler) + + # Add queue handler to the logger + os_logger.addHandler(os_queue_handler) + + # Start listening on the queue using the os_queue_listener + os_queue_listener.start() + + print("Logger is set up and listener has started. 
Sending a test log...") + # Logging a test message + os_logger.info("This is a test log message") + + print("Cleaning up...") + # Stop listening on the queue + os_queue_listener.stop() + print("Log Collection Guide has completed running") + + +if __name__ == "__main__": + main() From 51909bb11a606a50be074c726e2b58cd6396112f Mon Sep 17 00:00:00 2001 From: Zelin Hao Date: Wed, 22 Nov 2023 17:49:47 -0800 Subject: [PATCH 68/80] Update the GHA release workflow with trusted publisher enabled (#614) * Add GitHub action for opensearch-py release Signed-off-by: Zelin Hao * Generate GitHub release at the end Signed-off-by: Zelin Hao * Update CHANGELOG Signed-off-by: Zelin Hao * Update CHANGELOG Signed-off-by: Zelin Hao --------- Signed-off-by: Zelin Hao Signed-off-by: roma2023 --- .github/workflows/release-drafter.yml | 24 +++++++++++++++++++++++- CHANGELOG.md | 1 + 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 3c17bc3e..fe8cb0ca 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -32,11 +32,33 @@ jobs: - name: Build project for distribution run: | python -m build + - name: upload windows dists + uses: actions/upload-artifact@v3 + with: + name: release-dists + path: dist/ + + pypi-publish: + runs-on: ubuntu-latest + needs: + - draft-a-release + permissions: + id-token: write + contents: write + steps: + - name: Retrieve release distributions + uses: actions/download-artifact@v3 + with: + name: release-dists + path: dist + - name: Generate the artifacts + run: | tar -zvcf artifacts.tar.gz dist + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 - name: Release uses: softprops/action-gh-release@v1 with: - draft: true generate_release_notes: true files: | artifacts.tar.gz diff --git a/CHANGELOG.md b/CHANGELOG.md index 4aa99496..9a9b476d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added pylint `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590)) - Added pylint `pointless-statement` ([#611](https://github.com/opensearch-project/opensearch-py/pull/611)) - Added a log collection guide ([#579](https://github.com/opensearch-project/opensearch-py/pull/579)) +- Added GHA release ([#614](https://github.com/opensearch-project/opensearch-py/pull/614)) ### Changed ### Deprecated ### Removed From f7a018f35c0ac1e894ee48a3094de611aacf8f9f Mon Sep 17 00:00:00 2001 From: Samuel Orji Date: Fri, 24 Nov 2023 21:19:50 +0000 Subject: [PATCH 69/80] remove unnecessary utf-8 header in .py files (#615) * remove unnecessary utf-8 header in .py files Signed-off-by: samuel orji * review feedback: add link to changelog Signed-off-by: samuel orji --------- Signed-off-by: samuel orji Signed-off-by: roma2023 --- CHANGELOG.md | 1 + benchmarks/bench_async.py | 1 - benchmarks/bench_info_sync.py | 1 - benchmarks/bench_sync.py | 1 - benchmarks/bench_sync_async.py | 1 - benchmarks/thread_with_return_value.py | 1 - noxfile.py | 1 - opensearchpy/__init__.py | 1 - opensearchpy/_async/__init__.py | 1 - opensearchpy/_async/_extra_imports.py | 1 - opensearchpy/_async/client/__init__.py | 1 - opensearchpy/_async/client/_patch.py | 1 - opensearchpy/_async/client/cat.py | 1 - opensearchpy/_async/client/client.py | 1 - opensearchpy/_async/client/cluster.py | 1 - opensearchpy/_async/client/dangling_indices.py | 1 - 
opensearchpy/_async/client/features.py | 1 - opensearchpy/_async/client/http.py | 1 - opensearchpy/_async/client/indices.py | 1 - opensearchpy/_async/client/ingest.py | 1 - opensearchpy/_async/client/nodes.py | 1 - opensearchpy/_async/client/plugins.py | 1 - opensearchpy/_async/client/remote.py | 1 - opensearchpy/_async/client/remote_store.py | 1 - opensearchpy/_async/client/security.py | 1 - opensearchpy/_async/client/snapshot.py | 1 - opensearchpy/_async/client/tasks.py | 1 - opensearchpy/_async/client/utils.py | 1 - opensearchpy/_async/compat.py | 1 - opensearchpy/_async/helpers/__init__.py | 1 - opensearchpy/_async/helpers/actions.py | 1 - opensearchpy/_async/helpers/document.py | 1 - opensearchpy/_async/helpers/faceted_search.py | 1 - opensearchpy/_async/helpers/index.py | 1 - opensearchpy/_async/helpers/mapping.py | 1 - opensearchpy/_async/helpers/search.py | 1 - opensearchpy/_async/helpers/test.py | 1 - opensearchpy/_async/helpers/update_by_query.py | 1 - opensearchpy/_async/http_aiohttp.py | 1 - opensearchpy/_async/plugins/__init__.py | 1 - opensearchpy/_async/plugins/alerting.py | 1 - opensearchpy/_async/plugins/index_management.py | 1 - opensearchpy/_async/transport.py | 1 - opensearchpy/_version.py | 1 - opensearchpy/client/__init__.py | 1 - opensearchpy/client/_patch.py | 1 - opensearchpy/client/cat.py | 1 - opensearchpy/client/client.py | 1 - opensearchpy/client/cluster.py | 1 - opensearchpy/client/dangling_indices.py | 1 - opensearchpy/client/features.py | 1 - opensearchpy/client/http.py | 1 - opensearchpy/client/indices.py | 1 - opensearchpy/client/ingest.py | 1 - opensearchpy/client/nodes.py | 1 - opensearchpy/client/plugins.py | 1 - opensearchpy/client/remote.py | 1 - opensearchpy/client/remote_store.py | 1 - opensearchpy/client/security.py | 1 - opensearchpy/client/snapshot.py | 1 - opensearchpy/client/tasks.py | 1 - opensearchpy/client/utils.py | 1 - opensearchpy/compat.py | 1 - opensearchpy/connection/__init__.py | 1 - opensearchpy/connection/async_connections.py | 1 - opensearchpy/connection/base.py | 1 - opensearchpy/connection/connections.py | 1 - opensearchpy/connection/http_async.py | 1 - opensearchpy/connection/http_requests.py | 1 - opensearchpy/connection/http_urllib3.py | 1 - opensearchpy/connection/pooling.py | 1 - opensearchpy/connection_pool.py | 1 - opensearchpy/exceptions.py | 1 - opensearchpy/helpers/__init__.py | 1 - opensearchpy/helpers/actions.py | 1 - opensearchpy/helpers/aggs.py | 1 - opensearchpy/helpers/analysis.py | 1 - opensearchpy/helpers/asyncsigner.py | 1 - opensearchpy/helpers/document.py | 1 - opensearchpy/helpers/errors.py | 1 - opensearchpy/helpers/faceted_search.py | 1 - opensearchpy/helpers/field.py | 1 - opensearchpy/helpers/function.py | 1 - opensearchpy/helpers/index.py | 1 - opensearchpy/helpers/mapping.py | 1 - opensearchpy/helpers/query.py | 1 - opensearchpy/helpers/response/__init__.py | 1 - opensearchpy/helpers/response/aggs.py | 1 - opensearchpy/helpers/response/hit.py | 1 - opensearchpy/helpers/search.py | 1 - opensearchpy/helpers/signer.py | 1 - opensearchpy/helpers/test.py | 1 - opensearchpy/helpers/update_by_query.py | 1 - opensearchpy/helpers/utils.py | 1 - opensearchpy/helpers/wrappers.py | 1 - opensearchpy/plugins/__init__.py | 1 - opensearchpy/plugins/alerting.py | 1 - opensearchpy/plugins/index_management.py | 1 - opensearchpy/serializer.py | 1 - opensearchpy/transport.py | 1 - samples/advanced_index_actions/advanced_index_actions_sample.py | 1 - samples/aws/search_requests.py | 1 - samples/aws/search_urllib3.py | 1 - 
samples/bulk/bulk_array.py | 1 - samples/bulk/bulk_helpers.py | 1 - samples/bulk/bulk_ld.py | 1 - samples/document_lifecycle/document_lifecycle_sample.py | 1 - samples/hello/hello.py | 1 - samples/hello/hello_async.py | 1 - samples/index_template/index_template_sample.py | 1 - samples/json/json_hello.py | 1 - samples/json/json_hello_async.py | 1 - samples/knn/knn_async_basics.py | 1 - samples/knn/knn_basics.py | 1 - samples/knn/knn_boolean_filter.py | 1 - samples/knn/knn_efficient_filter.py | 1 - samples/logging/log_collection_sample.py | 1 - samples/security/roles.py | 1 - samples/security/users.py | 1 - setup.py | 1 - test_opensearchpy/__init__.py | 1 - test_opensearchpy/test_async/__init__.py | 1 - test_opensearchpy/test_async/test_client.py | 1 - test_opensearchpy/test_async/test_connection.py | 1 - test_opensearchpy/test_async/test_helpers/__init__.py | 1 - test_opensearchpy/test_async/test_helpers/conftest.py | 1 - test_opensearchpy/test_async/test_helpers/test_document.py | 1 - test_opensearchpy/test_async/test_helpers/test_faceted_search.py | 1 - test_opensearchpy/test_async/test_helpers/test_index.py | 1 - test_opensearchpy/test_async/test_helpers/test_mapping.py | 1 - test_opensearchpy/test_async/test_helpers/test_search.py | 1 - .../test_async/test_helpers/test_update_by_query.py | 1 - test_opensearchpy/test_async/test_http.py | 1 - test_opensearchpy/test_async/test_http_connection.py | 1 - test_opensearchpy/test_async/test_plugins_client.py | 1 - test_opensearchpy/test_async/test_server/__init__.py | 1 - test_opensearchpy/test_async/test_server/conftest.py | 1 - test_opensearchpy/test_async/test_server/test_clients.py | 1 - .../test_async/test_server/test_helpers/__init__.py | 1 - .../test_async/test_server/test_helpers/conftest.py | 1 - .../test_async/test_server/test_helpers/test_actions.py | 1 - .../test_async/test_server/test_helpers/test_data.py | 1 - .../test_async/test_server/test_helpers/test_document.py | 1 - .../test_async/test_server/test_helpers/test_faceted_search.py | 1 - .../test_async/test_server/test_helpers/test_index.py | 1 - .../test_async/test_server/test_helpers/test_mapping.py | 1 - .../test_async/test_server/test_helpers/test_search.py | 1 - .../test_async/test_server/test_helpers/test_update_by_query.py | 1 - .../test_async/test_server/test_plugins/__init__.py | 1 - .../test_async/test_server/test_plugins/test_alerting.py | 1 - .../test_async/test_server/test_plugins/test_index_management.py | 1 - test_opensearchpy/test_async/test_server/test_rest_api_spec.py | 1 - test_opensearchpy/test_async/test_server_secured/__init__.py | 1 - .../test_async/test_server_secured/test_security_plugin.py | 1 - test_opensearchpy/test_async/test_signer.py | 1 - test_opensearchpy/test_async/test_transport.py | 1 - test_opensearchpy/test_cases.py | 1 - test_opensearchpy/test_client/__init__.py | 1 - test_opensearchpy/test_client/test_cluster.py | 1 - test_opensearchpy/test_client/test_http.py | 1 - test_opensearchpy/test_client/test_indices.py | 1 - test_opensearchpy/test_client/test_overrides.py | 1 - test_opensearchpy/test_client/test_plugins/__init__.py | 1 - test_opensearchpy/test_client/test_plugins/test_alerting.py | 1 - .../test_client/test_plugins/test_index_management.py | 1 - .../test_client/test_plugins/test_plugins_client.py | 1 - test_opensearchpy/test_client/test_point_in_time.py | 1 - test_opensearchpy/test_client/test_remote_store.py | 1 - test_opensearchpy/test_client/test_requests.py | 1 - test_opensearchpy/test_client/test_urllib3.py | 1 - 
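A sweep across 224 files like this one is easiest to script. A rough sketch of how such a cleanup could be automated (illustrative only; the file-handling details are assumptions, and the actual patch was not necessarily produced this way):

```python
#!/usr/bin/env python
# Strip redundant UTF-8 coding headers from every .py file under the repo root.
import pathlib

CODING_HEADER = "# -*- coding: utf-8 -*-"

def strip_coding_header(path: pathlib.Path) -> bool:
    """Return True if the header was found and removed."""
    lines = path.read_text(encoding="utf-8").splitlines(keepends=True)
    cleaned = [line for line in lines if line.strip() != CODING_HEADER]
    if len(cleaned) == len(lines):
        return False  # nothing to do for this file
    path.write_text("".join(cleaned), encoding="utf-8")
    return True

if __name__ == "__main__":
    changed = [p for p in pathlib.Path(".").rglob("*.py") if strip_coding_header(p)]
    print(f"Stripped the coding header from {len(changed)} files.")
```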
test_opensearchpy/test_client/test_utils.py | 1 - test_opensearchpy/test_connection/__init__.py | 1 - test_opensearchpy/test_connection/test_base_connection.py | 1 - .../test_connection/test_requests_http_connection.py | 1 - .../test_connection/test_urllib3_http_connection.py | 1 - test_opensearchpy/test_connection_pool.py | 1 - test_opensearchpy/test_exceptions.py | 1 - test_opensearchpy/test_helpers/__init__.py | 1 - test_opensearchpy/test_helpers/conftest.py | 1 - test_opensearchpy/test_helpers/test_actions.py | 1 - test_opensearchpy/test_helpers/test_aggs.py | 1 - test_opensearchpy/test_helpers/test_analysis.py | 1 - test_opensearchpy/test_helpers/test_document.py | 1 - test_opensearchpy/test_helpers/test_faceted_search.py | 1 - test_opensearchpy/test_helpers/test_field.py | 1 - test_opensearchpy/test_helpers/test_index.py | 1 - test_opensearchpy/test_helpers/test_mapping.py | 1 - test_opensearchpy/test_helpers/test_query.py | 1 - test_opensearchpy/test_helpers/test_result.py | 1 - test_opensearchpy/test_helpers/test_search.py | 1 - test_opensearchpy/test_helpers/test_update_by_query.py | 1 - test_opensearchpy/test_helpers/test_utils.py | 1 - test_opensearchpy/test_helpers/test_validation.py | 1 - test_opensearchpy/test_helpers/test_wrappers.py | 1 - test_opensearchpy/test_http_server.py | 1 - test_opensearchpy/test_serializer.py | 1 - test_opensearchpy/test_server/__init__.py | 1 - test_opensearchpy/test_server/conftest.py | 1 - test_opensearchpy/test_server/test_clients.py | 1 - test_opensearchpy/test_server/test_helpers/__init__.py | 1 - test_opensearchpy/test_server/test_helpers/conftest.py | 1 - test_opensearchpy/test_server/test_helpers/test_actions.py | 1 - test_opensearchpy/test_server/test_helpers/test_analysis.py | 1 - test_opensearchpy/test_server/test_helpers/test_count.py | 1 - test_opensearchpy/test_server/test_helpers/test_data.py | 1 - test_opensearchpy/test_server/test_helpers/test_document.py | 1 - .../test_server/test_helpers/test_faceted_search.py | 1 - test_opensearchpy/test_server/test_helpers/test_index.py | 1 - test_opensearchpy/test_server/test_helpers/test_mapping.py | 1 - test_opensearchpy/test_server/test_helpers/test_search.py | 1 - .../test_server/test_helpers/test_update_by_query.py | 1 - test_opensearchpy/test_server/test_plugins/__init__.py | 1 - test_opensearchpy/test_server/test_plugins/test_alerting.py | 1 - .../test_server/test_plugins/test_index_management.py | 1 - test_opensearchpy/test_server/test_rest_api_spec.py | 1 - test_opensearchpy/test_server_secured/__init__.py | 1 - test_opensearchpy/test_server_secured/test_clients.py | 1 - test_opensearchpy/test_server_secured/test_security_plugin.py | 1 - test_opensearchpy/test_transport.py | 1 - test_opensearchpy/test_types/aliased_types.py | 1 - test_opensearchpy/test_types/async_types.py | 1 - test_opensearchpy/test_types/sync_types.py | 1 - test_opensearchpy/utils.py | 1 - utils/build_dists.py | 1 - 224 files changed, 1 insertion(+), 223 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a9b476d..e3ea1f1b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Changed ### Deprecated ### Removed +- Removed unnecessary `# -*- coding: utf-8 -*-` headers from .py files ([#615](https://github.com/opensearch-project/opensearch-py/pull/615)) ### Fixed ### Security diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index 02c0a238..7e9d1b3d 100644 --- a/benchmarks/bench_async.py +++ 
b/benchmarks/bench_async.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index bc891067..aba6d024 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index 7b4695eb..0f3c5286 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/bench_sync_async.py b/benchmarks/bench_sync_async.py index 7950dc64..eb067dd6 100644 --- a/benchmarks/bench_sync_async.py +++ b/benchmarks/bench_sync_async.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/benchmarks/thread_with_return_value.py b/benchmarks/thread_with_return_value.py index 089c6fde..23b582fd 100644 --- a/benchmarks/thread_with_return_value.py +++ b/benchmarks/thread_with_return_value.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/noxfile.py b/noxfile.py index be71ac0b..d453a3a2 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index aa528c98..b852272b 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/__init__.py b/opensearchpy/_async/__init__.py index 392fa5bd..7e52ae22 100644 --- a/opensearchpy/_async/__init__.py +++ b/opensearchpy/_async/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/_extra_imports.py b/opensearchpy/_async/_extra_imports.py index e19a11a9..5fd19461 100644 --- a/opensearchpy/_async/_extra_imports.py +++ b/opensearchpy/_async/_extra_imports.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index dfe26775..f88ee0b9 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/_patch.py b/opensearchpy/_async/client/_patch.py index 1b9bcb5d..6aa2f81b 100644 --- a/opensearchpy/_async/client/_patch.py +++ b/opensearchpy/_async/client/_patch.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/cat.py 
b/opensearchpy/_async/client/cat.py index fdd3dec2..a93fb208 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/client.py b/opensearchpy/_async/client/client.py index 7f0b67c6..091bb5e9 100644 --- a/opensearchpy/_async/client/client.py +++ b/opensearchpy/_async/client/client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 4838b5b4..70d93cf8 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index 44744de4..beb24247 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/features.py b/opensearchpy/_async/client/features.py index 1b69aa04..85636694 100644 --- a/opensearchpy/_async/client/features.py +++ b/opensearchpy/_async/client/features.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/http.py b/opensearchpy/_async/client/http.py index 89278c6b..63d4df27 100644 --- a/opensearchpy/_async/client/http.py +++ b/opensearchpy/_async/client/http.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index 9ec3b130..4e1c45eb 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index 81f44ec5..9702a346 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index b425412a..6afda0d2 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/plugins.py b/opensearchpy/_async/client/plugins.py index b12214d7..5bf66288 100644 --- a/opensearchpy/_async/client/plugins.py +++ b/opensearchpy/_async/client/plugins.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/remote.py 
b/opensearchpy/_async/client/remote.py index 433c9fa5..13787273 100644 --- a/opensearchpy/_async/client/remote.py +++ b/opensearchpy/_async/client/remote.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index 7e021106..7b97513c 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index c4cca167..c5b9f2ca 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index 2519d633..c21ea254 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 9484c500..aad58c99 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/utils.py b/opensearchpy/_async/client/utils.py index 0c2235aa..dfeb096c 100644 --- a/opensearchpy/_async/client/utils.py +++ b/opensearchpy/_async/client/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/compat.py b/opensearchpy/_async/compat.py index 2ba1b980..5dbbf4f5 100644 --- a/opensearchpy/_async/compat.py +++ b/opensearchpy/_async/compat.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/__init__.py b/opensearchpy/_async/helpers/__init__.py index 22c54ac8..6c0097cd 100644 --- a/opensearchpy/_async/helpers/__init__.py +++ b/opensearchpy/_async/helpers/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index c85b2ac8..1a013d27 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index 83349f7e..09549068 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/faceted_search.py 
b/opensearchpy/_async/helpers/faceted_search.py index 1eb5a677..2b0501d9 100644 --- a/opensearchpy/_async/helpers/faceted_search.py +++ b/opensearchpy/_async/helpers/faceted_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index 42d63dfd..e7b33748 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py index dd560564..93f04f05 100644 --- a/opensearchpy/_async/helpers/mapping.py +++ b/opensearchpy/_async/helpers/mapping.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/search.py b/opensearchpy/_async/helpers/search.py index d844ba29..7f09ba7f 100644 --- a/opensearchpy/_async/helpers/search.py +++ b/opensearchpy/_async/helpers/search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/test.py b/opensearchpy/_async/helpers/test.py index 9516857c..455a2781 100644 --- a/opensearchpy/_async/helpers/test.py +++ b/opensearchpy/_async/helpers/test.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/update_by_query.py b/opensearchpy/_async/helpers/update_by_query.py index aeb8e3d2..7ea3b906 100644 --- a/opensearchpy/_async/helpers/update_by_query.py +++ b/opensearchpy/_async/helpers/update_by_query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index f301918f..b1baf148 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/plugins/__init__.py b/opensearchpy/_async/plugins/__init__.py index 22c54ac8..6c0097cd 100644 --- a/opensearchpy/_async/plugins/__init__.py +++ b/opensearchpy/_async/plugins/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/plugins/alerting.py b/opensearchpy/_async/plugins/alerting.py index f1cf3ac9..20818f11 100644 --- a/opensearchpy/_async/plugins/alerting.py +++ b/opensearchpy/_async/plugins/alerting.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/plugins/index_management.py b/opensearchpy/_async/plugins/index_management.py index bbca4e2f..cf15c4eb 100644 --- a/opensearchpy/_async/plugins/index_management.py +++ b/opensearchpy/_async/plugins/index_management.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require 
contributions made to diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py index 854f0a06..e8b17252 100644 --- a/opensearchpy/_async/transport.py +++ b/opensearchpy/_async/transport.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index 204e14a3..b9ad64ce 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index e6f7021b..013b1b1b 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/_patch.py b/opensearchpy/client/_patch.py index 6f5a1edb..006ae313 100644 --- a/opensearchpy/client/_patch.py +++ b/opensearchpy/client/_patch.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 8596a6f3..d30b7b90 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/client.py b/opensearchpy/client/client.py index 7f0b67c6..091bb5e9 100644 --- a/opensearchpy/client/client.py +++ b/opensearchpy/client/client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index 2686bbfa..fc49f266 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index 31b777e7..e6184e9a 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/features.py b/opensearchpy/client/features.py index c6520fa1..7bf0deee 100644 --- a/opensearchpy/client/features.py +++ b/opensearchpy/client/features.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/http.py b/opensearchpy/client/http.py index d709bf47..465a724c 100644 --- a/opensearchpy/client/http.py +++ b/opensearchpy/client/http.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index 9dc12f38..424529f1 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors 
require contributions made to diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index 13ba1a69..632a23d9 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 9ad6534a..73c0f8dd 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/plugins.py b/opensearchpy/client/plugins.py index b12214d7..5bf66288 100644 --- a/opensearchpy/client/plugins.py +++ b/opensearchpy/client/plugins.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/remote.py b/opensearchpy/client/remote.py index 5c1c0f0c..7393e0ac 100644 --- a/opensearchpy/client/remote.py +++ b/opensearchpy/client/remote.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py index 799c6aa1..55d8fd6e 100644 --- a/opensearchpy/client/remote_store.py +++ b/opensearchpy/client/remote_store.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index a10ec655..d7b76a90 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index 50f67357..eecd15a9 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index 29361e2b..02a3eb0f 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index 0663fd1d..a38c30cd 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index ca874943..977edf37 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection/__init__.py b/opensearchpy/connection/__init__.py index 287b7ecb..b86bd66e 100644 --- a/opensearchpy/connection/__init__.py +++ b/opensearchpy/connection/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors 
require contributions made to diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index 670bbaeb..60ca210b 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index eda6adfe..845598a6 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py index 5b1e9a9c..3f1edc4a 100644 --- a/opensearchpy/connection/connections.py +++ b/opensearchpy/connection/connections.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection/http_async.py b/opensearchpy/connection/http_async.py index d6ee57ee..468f3244 100644 --- a/opensearchpy/connection/http_async.py +++ b/opensearchpy/connection/http_async.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection/http_requests.py b/opensearchpy/connection/http_requests.py index a966631d..9bf83004 100644 --- a/opensearchpy/connection/http_requests.py +++ b/opensearchpy/connection/http_requests.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py index 2a5ccd3b..54f2a22a 100644 --- a/opensearchpy/connection/http_urllib3.py +++ b/opensearchpy/connection/http_urllib3.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection/pooling.py b/opensearchpy/connection/pooling.py index 87bd8c72..a7a61f4b 100644 --- a/opensearchpy/connection/pooling.py +++ b/opensearchpy/connection/pooling.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py index 378b91b3..d796aa13 100644 --- a/opensearchpy/connection_pool.py +++ b/opensearchpy/connection_pool.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/exceptions.py b/opensearchpy/exceptions.py index 58d29bdf..fdea1e2a 100644 --- a/opensearchpy/exceptions.py +++ b/opensearchpy/exceptions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/__init__.py b/opensearchpy/helpers/__init__.py index 7116dc48..8057de7e 100644 --- a/opensearchpy/helpers/__init__.py +++ b/opensearchpy/helpers/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/actions.py 
b/opensearchpy/helpers/actions.py index 960d5a1c..c6c03d7e 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py index 4e06e7d9..a9b87521 100644 --- a/opensearchpy/helpers/aggs.py +++ b/opensearchpy/helpers/aggs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py index c228acd1..816a29b6 100644 --- a/opensearchpy/helpers/analysis.py +++ b/opensearchpy/helpers/analysis.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/asyncsigner.py b/opensearchpy/helpers/asyncsigner.py index 8dee4fee..c045f138 100644 --- a/opensearchpy/helpers/asyncsigner.py +++ b/opensearchpy/helpers/asyncsigner.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index f1673ce7..f3595bcf 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/errors.py b/opensearchpy/helpers/errors.py index 220b6b31..7012c050 100644 --- a/opensearchpy/helpers/errors.py +++ b/opensearchpy/helpers/errors.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py index e9ae14ef..37d067c1 100644 --- a/opensearchpy/helpers/faceted_search.py +++ b/opensearchpy/helpers/faceted_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/field.py b/opensearchpy/helpers/field.py index 4ffd21d8..f46e044d 100644 --- a/opensearchpy/helpers/field.py +++ b/opensearchpy/helpers/field.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py index f0885aa5..21f5407b 100644 --- a/opensearchpy/helpers/function.py +++ b/opensearchpy/helpers/function.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index 3b6185b4..25bc0664 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py index eaa13e3f..f75ef19e 100644 --- a/opensearchpy/helpers/mapping.py +++ b/opensearchpy/helpers/mapping.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch 
Contributors require contributions made to diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index b7861f78..1d3b6671 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/response/__init__.py b/opensearchpy/helpers/response/__init__.py index c6215a6b..4858f951 100644 --- a/opensearchpy/helpers/response/__init__.py +++ b/opensearchpy/helpers/response/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/response/aggs.py b/opensearchpy/helpers/response/aggs.py index 42015d2d..f52208e6 100644 --- a/opensearchpy/helpers/response/aggs.py +++ b/opensearchpy/helpers/response/aggs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/response/hit.py b/opensearchpy/helpers/response/hit.py index c6e8a4a9..319a886f 100644 --- a/opensearchpy/helpers/response/hit.py +++ b/opensearchpy/helpers/response/hit.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py index 069f4c89..5693e916 100644 --- a/opensearchpy/helpers/search.py +++ b/opensearchpy/helpers/search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py index 43b5ee3c..a497ed5b 100644 --- a/opensearchpy/helpers/signer.py +++ b/opensearchpy/helpers/signer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/test.py b/opensearchpy/helpers/test.py index bda16b2e..6ad34b66 100644 --- a/opensearchpy/helpers/test.py +++ b/opensearchpy/helpers/test.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/update_by_query.py b/opensearchpy/helpers/update_by_query.py index 7b560216..5eac0f51 100644 --- a/opensearchpy/helpers/update_by_query.py +++ b/opensearchpy/helpers/update_by_query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index a27ec9a4..c46c374a 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/helpers/wrappers.py b/opensearchpy/helpers/wrappers.py index 1583391c..e8af30e9 100644 --- a/opensearchpy/helpers/wrappers.py +++ b/opensearchpy/helpers/wrappers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/plugins/__init__.py b/opensearchpy/plugins/__init__.py index b0a5fb09..2f42da79 100644 --- 
a/opensearchpy/plugins/__init__.py +++ b/opensearchpy/plugins/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/plugins/alerting.py b/opensearchpy/plugins/alerting.py index 02c6b1a1..63977420 100644 --- a/opensearchpy/plugins/alerting.py +++ b/opensearchpy/plugins/alerting.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/plugins/index_management.py b/opensearchpy/plugins/index_management.py index 77a31279..0683b006 100644 --- a/opensearchpy/plugins/index_management.py +++ b/opensearchpy/plugins/index_management.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/serializer.py b/opensearchpy/serializer.py index e8c87ba9..e5c0a6c9 100644 --- a/opensearchpy/serializer.py +++ b/opensearchpy/serializer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index 44962542..f582a3be 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index a8eb3859..b06d82c3 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/aws/search_requests.py b/samples/aws/search_requests.py index 84c7f47a..544285ac 100644 --- a/samples/aws/search_requests.py +++ b/samples/aws/search_requests.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/aws/search_urllib3.py b/samples/aws/search_urllib3.py index 00581683..5ac438d9 100644 --- a/samples/aws/search_urllib3.py +++ b/samples/aws/search_urllib3.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/bulk/bulk_array.py b/samples/bulk/bulk_array.py index e8ea6a09..cb6dc8b1 100755 --- a/samples/bulk/bulk_array.py +++ b/samples/bulk/bulk_array.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/bulk/bulk_helpers.py b/samples/bulk/bulk_helpers.py index 0468b0f5..7371d6b1 100755 --- a/samples/bulk/bulk_helpers.py +++ b/samples/bulk/bulk_helpers.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/bulk/bulk_ld.py b/samples/bulk/bulk_ld.py index 0bf556fa..89e6f661 100755 --- a/samples/bulk/bulk_ld.py +++ b/samples/bulk/bulk_ld.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# 
-*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py index c21ae44a..53c7e718 100644 --- a/samples/document_lifecycle/document_lifecycle_sample.py +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/hello/hello.py b/samples/hello/hello.py index a614f085..acecf890 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/hello/hello_async.py b/samples/hello/hello_async.py index 8606a17d..c6a04e02 100755 --- a/samples/hello/hello_async.py +++ b/samples/hello/hello_async.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py index ca0f8310..25cfdddd 100644 --- a/samples/index_template/index_template_sample.py +++ b/samples/index_template/index_template_sample.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/json/json_hello.py b/samples/json/json_hello.py index 4b2e10e2..c0e537ec 100755 --- a/samples/json/json_hello.py +++ b/samples/json/json_hello.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/json/json_hello_async.py b/samples/json/json_hello_async.py index 34a4ca8c..afe5065f 100755 --- a/samples/json/json_hello_async.py +++ b/samples/json/json_hello_async.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/knn/knn_async_basics.py b/samples/knn/knn_async_basics.py index aa0acf6e..273015c2 100755 --- a/samples/knn/knn_async_basics.py +++ b/samples/knn/knn_async_basics.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/knn/knn_basics.py b/samples/knn/knn_basics.py index c74344b2..4ea49a21 100755 --- a/samples/knn/knn_basics.py +++ b/samples/knn/knn_basics.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/knn/knn_boolean_filter.py b/samples/knn/knn_boolean_filter.py index 710216f2..156fcf86 100755 --- a/samples/knn/knn_boolean_filter.py +++ b/samples/knn/knn_boolean_filter.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/knn/knn_efficient_filter.py b/samples/knn/knn_efficient_filter.py index dfe1308f..7777173d 100755 --- a/samples/knn/knn_efficient_filter.py +++ b/samples/knn/knn_efficient_filter.py 
@@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/logging/log_collection_sample.py b/samples/logging/log_collection_sample.py index 1e85b977..fbf25b60 100644 --- a/samples/logging/log_collection_sample.py +++ b/samples/logging/log_collection_sample.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/security/roles.py b/samples/security/roles.py index 37558042..7628a9f4 100644 --- a/samples/security/roles.py +++ b/samples/security/roles.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/samples/security/users.py b/samples/security/users.py index 3e1e90f5..7b89a37f 100644 --- a/samples/security/users.py +++ b/samples/security/users.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/setup.py b/setup.py index 6ad7254b..09c8e88c 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/__init__.py b/test_opensearchpy/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/__init__.py +++ b/test_opensearchpy/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/__init__.py b/test_opensearchpy/test_async/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_async/__init__.py +++ b/test_opensearchpy/test_async/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_client.py b/test_opensearchpy/test_async/test_client.py index cb74a187..5badce18 100644 --- a/test_opensearchpy/test_async/test_client.py +++ b/test_opensearchpy/test_async/test_client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index c9c0dc17..2ab2529c 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/__init__.py b/test_opensearchpy/test_async/test_helpers/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_async/test_helpers/__init__.py +++ b/test_opensearchpy/test_async/test_helpers/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py index bd1776ab..7b2a16f6 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ 
b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index 30822751..26c854c2 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py index c27bd3ea..40f27871 100644 --- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index e59d86ad..eccbe773 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py index 797c295f..05264985 100644 --- a/test_opensearchpy/test_async/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py index 1af617d7..d01f0b80 100644 --- a/test_opensearchpy/test_async/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py index 52fc20c3..b5380eeb 100644 --- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_http.py b/test_opensearchpy/test_async/test_http.py index 510fd165..61323bd7 100644 --- a/test_opensearchpy/test_async/test_http.py +++ b/test_opensearchpy/test_async/test_http.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index 94207433..415f34cc 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: 
Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py index 32a8ec3a..49222b08 100644 --- a/test_opensearchpy/test_async/test_plugins_client.py +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index 90cbf2f4..f62ef33c 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py index 79952bc4..42deb3cc 100644 --- a/test_opensearchpy/test_async/test_server/conftest.py +++ b/test_opensearchpy/test_async/test_server/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index 323532c5..cee6bc7b 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/__init__.py b/test_opensearchpy/test_async/test_server/test_helpers/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/__init__.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index 69282ead..1aa9bb18 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 3608d935..158df715 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index 7a23b8b1..b0f396f2 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The 
OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 8e4e95e2..bf02161d 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index b03fefe8..783f902d 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index 14b87e15..e2670e55 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index 35a4e8d8..0aa3c0b8 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index 8431fa4a..a42daf6c 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index 46e515df..a26d046e 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py 
b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index 5af06a24..4e742c2f 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py index 4f5fcfa1..0ffae69f 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index 53aeb3ad..c56ed70c 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server_secured/__init__.py b/test_opensearchpy/test_async/test_server_secured/__init__.py index 22c54ac8..6c0097cd 100644 --- a/test_opensearchpy/test_async/test_server_secured/__init__.py +++ b/test_opensearchpy/test_async/test_server_secured/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index e5638a67..e9b8f329 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 319340da..664f6a95 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index 179a573c..e119b020 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index 29bf9394..a03f0e44 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py index 3174772e..55fcd4a9 100644 --- 
a/test_opensearchpy/test_client/__init__.py +++ b/test_opensearchpy/test_client/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_cluster.py b/test_opensearchpy/test_client/test_cluster.py index f170a448..3e9ad987 100644 --- a/test_opensearchpy/test_client/test_cluster.py +++ b/test_opensearchpy/test_client/test_cluster.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_http.py b/test_opensearchpy/test_client/test_http.py index 7cd168f7..145cbb6d 100644 --- a/test_opensearchpy/test_client/test_http.py +++ b/test_opensearchpy/test_client/test_http.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_indices.py b/test_opensearchpy/test_client/test_indices.py index 668eebd7..d45405e5 100644 --- a/test_opensearchpy/test_client/test_indices.py +++ b/test_opensearchpy/test_client/test_indices.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_overrides.py b/test_opensearchpy/test_client/test_overrides.py index 16cb3ab4..160a8bdd 100644 --- a/test_opensearchpy/test_client/test_overrides.py +++ b/test_opensearchpy/test_client/test_overrides.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_plugins/__init__.py b/test_opensearchpy/test_client/test_plugins/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_client/test_plugins/__init__.py +++ b/test_opensearchpy/test_client/test_plugins/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_plugins/test_alerting.py b/test_opensearchpy/test_client/test_plugins/test_alerting.py index 482a4224..f012ccbb 100644 --- a/test_opensearchpy/test_client/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_client/test_plugins/test_alerting.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_plugins/test_index_management.py b/test_opensearchpy/test_client/test_plugins/test_index_management.py index 891d6f02..a2052163 100644 --- a/test_opensearchpy/test_client/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_client/test_plugins/test_index_management.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py index ed65dca4..793afda2 100644 --- a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py +++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions 
made to diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index 30940ce4..38a4b8cc 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py index a9bfc894..faf91297 100644 --- a/test_opensearchpy/test_client/test_remote_store.py +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py index 66ec8cbc..b3ac3d6f 100644 --- a/test_opensearchpy/test_client/test_requests.py +++ b/test_opensearchpy/test_client/test_requests.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py index 064c49cc..d30c85e7 100644 --- a/test_opensearchpy/test_client/test_urllib3.py +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index 797624fc..efed662a 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_connection/__init__.py b/test_opensearchpy/test_connection/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_connection/__init__.py +++ b/test_opensearchpy/test_connection/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py index 45cc46fd..6725849a 100644 --- a/test_opensearchpy/test_connection/test_base_connection.py +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index f175990d..a081fd05 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index 971a3254..cca1945d 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ 
b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index 45afd93e..9f1a7d9a 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_exceptions.py b/test_opensearchpy/test_exceptions.py index 26e9e044..a918e2b0 100644 --- a/test_opensearchpy/test_exceptions.py +++ b/test_opensearchpy/test_exceptions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/__init__.py b/test_opensearchpy/test_helpers/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_helpers/__init__.py +++ b/test_opensearchpy/test_helpers/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/conftest.py b/test_opensearchpy/test_helpers/conftest.py index 09778000..06355e24 100644 --- a/test_opensearchpy/test_helpers/conftest.py +++ b/test_opensearchpy/test_helpers/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index e44dbc98..e8bd3396 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 97ae368a..006edbe4 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_analysis.py b/test_opensearchpy/test_helpers/test_analysis.py index 0226ee48..d335b565 100644 --- a/test_opensearchpy/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_helpers/test_analysis.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index 89ee25ca..d60dd6b2 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py index 528cd485..d1874541 100644 --- a/test_opensearchpy/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_helpers/test_faceted_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # 
SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py index ce818b50..65dbab5a 100644 --- a/test_opensearchpy/test_helpers/test_field.py +++ b/test_opensearchpy/test_helpers/test_field.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index 59c3e28e..eac720b4 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py index 2006b66f..6e4af163 100644 --- a/test_opensearchpy/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_helpers/test_mapping.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index dbda0b91..7fb2f3a0 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index d3717d37..302abdc5 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py index b44d5dd5..c7fa20c5 100644 --- a/test_opensearchpy/test_helpers/test_search.py +++ b/test_opensearchpy/test_helpers/test_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py index 90e7aa78..afcfabc9 100644 --- a/test_opensearchpy/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_helpers/test_update_by_query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index d6139826..c67bcb8b 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index 6841f604..67f2fe50 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch 
Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py index 37ea76b8..6826b83e 100644 --- a/test_opensearchpy/test_helpers/test_wrappers.py +++ b/test_opensearchpy/test_helpers/test_wrappers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_http_server.py b/test_opensearchpy/test_http_server.py index 844696ef..713d1931 100644 --- a/test_opensearchpy/test_http_server.py +++ b/test_opensearchpy/test_http_server.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index 4823a1d4..524887d0 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index 650991ca..36b548b5 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/conftest.py b/test_opensearchpy/test_server/conftest.py index 7acd581b..e1f83e4b 100644 --- a/test_opensearchpy/test_server/conftest.py +++ b/test_opensearchpy/test_server/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py index 32550a03..e945b69a 100644 --- a/test_opensearchpy/test_server/test_clients.py +++ b/test_opensearchpy/test_server/test_clients.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/__init__.py b/test_opensearchpy/test_server/test_helpers/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_server/test_helpers/__init__.py +++ b/test_opensearchpy/test_server/test_helpers/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py index 35c92e14..4bcc90f8 100644 --- a/test_opensearchpy/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_server/test_helpers/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index ab5f66e2..fad77eeb 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git 
a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py index e965e05b..9f8c69f6 100644 --- a/test_opensearchpy/test_server/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_server/test_helpers/test_analysis.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py index 65f424d1..20b43608 100644 --- a/test_opensearchpy/test_server/test_helpers/test_count.py +++ b/test_opensearchpy/test_server/test_helpers/test_count.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index 11ad915f..9118ffed 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index ad0bf289..53e24173 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index 38dd40cd..54e49c9d 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py index 71f0501a..5b8250b4 100644 --- a/test_opensearchpy/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_server/test_helpers/test_index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py index 722a249e..62d608a6 100644 --- a/test_opensearchpy/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py index 4fb00597..bbad6cc3 100644 --- a/test_opensearchpy/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_search.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git 
a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py index dfc4d250..07d592c7 100644 --- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_plugins/__init__.py b/test_opensearchpy/test_server/test_plugins/__init__.py index 392fa5bd..7e52ae22 100644 --- a/test_opensearchpy/test_server/test_plugins/__init__.py +++ b/test_opensearchpy/test_server/test_plugins/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py index aa1eaf6c..6ecac372 100644 --- a/test_opensearchpy/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_server/test_plugins/test_index_management.py index ed8c0b57..1d2b696f 100644 --- a/test_opensearchpy/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_server/test_plugins/test_index_management.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index f5c6d8c7..8df3895a 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server_secured/__init__.py b/test_opensearchpy/test_server_secured/__init__.py index 22c54ac8..6c0097cd 100644 --- a/test_opensearchpy/test_server_secured/__init__.py +++ b/test_opensearchpy/test_server_secured/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server_secured/test_clients.py b/test_opensearchpy/test_server_secured/test_clients.py index 94684ffb..c885b58d 100644 --- a/test_opensearchpy/test_server_secured/test_clients.py +++ b/test_opensearchpy/test_server_secured/test_clients.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index e43b2278..8f8c6937 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 
e299e23f..6a09b83b 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/aliased_types.py b/test_opensearchpy/test_types/aliased_types.py index 6d4a5a64..f7a93e09 100644 --- a/test_opensearchpy/test_types/aliased_types.py +++ b/test_opensearchpy/test_types/aliased_types.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/async_types.py b/test_opensearchpy/test_types/async_types.py index e6275662..b26b5d67 100644 --- a/test_opensearchpy/test_types/async_types.py +++ b/test_opensearchpy/test_types/async_types.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/sync_types.py b/test_opensearchpy/test_types/sync_types.py index df6634c4..d772342b 100644 --- a/test_opensearchpy/test_types/sync_types.py +++ b/test_opensearchpy/test_types/sync_types.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py index 50682d35..d4469600 100644 --- a/test_opensearchpy/utils.py +++ b/test_opensearchpy/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/utils/build_dists.py b/utils/build_dists.py index 137542b4..8e7b43a4 100644 --- a/utils/build_dists.py +++ b/utils/build_dists.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to From 56c40d2bd67e5997eedd3cb8184762e273fb8fbc Mon Sep 17 00:00:00 2001 From: James Addison <55152140+jayaddison@users.noreply.github.com> Date: Tue, 28 Nov 2023 19:00:31 +0000 Subject: [PATCH 70/80] Remove unnecessary utf-8 header in license_headers.py (follow-up to #615) (#617) * License tools: remove utf-8 coding declaration from license_headers.py check/fix script; since #615 it is no longer used in the library's codebase. UTF-8 is the default encoding used to read source code files for Python 3 - see https://docs.python.org/3/howto/unicode.html#unicode-literals-in-python-source-code Signed-off-by: James Addison * Cleanup: remove utf-8 coding declaration from the license_headers.py script itself Signed-off-by: James Addison * Update CHANGELOG.md Signed-off-by: James Addison --------- Signed-off-by: James Addison Signed-off-by: roma2023 --- CHANGELOG.md | 2 +- utils/license_headers.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e3ea1f1b..31c9a5a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Changed ### Deprecated ### Removed -- Removed unnecessary `# -*- coding: utf-8 -*-` headers from .py files ([#615](https://github.com/opensearch-project/opensearch-py/pull/615)) +- Removed unnecessary `# -*- coding: utf-8 -*-` headers from .py files ([#615](https://github.com/opensearch-project/opensearch-py/pull/615), [#617](https://github.com/opensearch-project/opensearch-py/pull/617)) ### Fixed ### Security diff
--git a/utils/license_headers.py b/utils/license_headers.py index 903f176d..575e9868 100644 --- a/utils/license_headers.py +++ b/utils/license_headers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -18,7 +17,7 @@ import sys from typing import Iterator, List -LINES_TO_KEEP = ["# -*- coding: utf-8 -*-", "#!/usr/bin/env python"] +LINES_TO_KEEP = ["#!/usr/bin/env python"] LICENSE_HEADER = """ # SPDX-License-Identifier: Apache-2.0 From 546da5fe0d23e45752f428949837418a4ebe6ba4 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" Date: Fri, 1 Dec 2023 12:35:58 -0500 Subject: [PATCH 71/80] Add OpenSearch 2.11.1 integration tests. (#584) * Add OpenSearch 2.11.1 integration tests. Signed-off-by: dblock * Exclude flaky integration tests with OpenSearch 2.0.1. Signed-off-by: dblock --------- Signed-off-by: dblock Signed-off-by: roma2023 --- .github/workflows/integration.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 0ca6c823..59990902 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -9,9 +9,14 @@ jobs: strategy: fail-fast: false matrix: - opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0', '2.11.0' ] + opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0', '2.11.1' ] secured: [ "true", "false" ] - + exclude: # https://github.com/opensearch-project/opensearch-py/issues/612 - opensearch_version: 2.0.1 secured: "true" - opensearch_version: 2.1.0 secured: "true" steps: - name: Checkout uses: actions/checkout@v3 From 1aeeef52bbf4c60dcbb252caff96258ca379ebad Mon Sep 17 00:00:00 2001 From: DJ Carrillo <60985926+Djcarrillo6@users.noreply.github.com> Date: Mon, 4 Dec 2023 06:26:25 -0800 Subject: [PATCH 72/80] Added fix for KeyError because of missing 'hits' key. (#616) Updated CHANGELOG.md. nox formatting applied. Added new unit test for actions scan function. Added type hints & nox formatting. Added fix to async scan function & added matching unit tests for async.
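For illustration, a minimal sketch of the failure mode this patch guards against (the response dict below is hypothetical; the defensive `.get()` chain mirrors the change in the diff):

    resp = {"_scroll_id": "dummy_scroll_id", "_shards": {}}  # scroll response without a "hits" key
    resp["hits"]["hits"]                  # before: raises KeyError
    resp.get("hits", {}).get("hits", [])  # after: returns [] and the scan loop ends cleanly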
Signed-off-by: Djcarrillo6 Signed-off-by: roma2023 --- CHANGELOG.md | 1 + opensearchpy/_async/helpers/actions.py | 15 ++++++---- opensearchpy/helpers/actions.py | 16 +++++++---- .../test_server/test_helpers/test_actions.py | 28 +++++++++++++++++++ .../test_helpers/test_actions.py | 22 +++++++++++++++ 5 files changed, 70 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 31c9a5a9..c7ee54a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Removed - Removed unnecessary `# -*- coding: utf-8 -*-` headers from .py files ([#615](https://github.com/opensearch-project/opensearch-py/pull/615), [#617](https://github.com/opensearch-project/opensearch-py/pull/617)) ### Fixed +- Fix KeyError when scroll return no hits ([#616](https://github.com/opensearch-project/opensearch-py/pull/616)) ### Security ## [2.4.2] diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index 1a013d27..03f88043 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -390,14 +390,17 @@ async def async_scan( scroll_id = resp.get("_scroll_id") try: - while scroll_id and resp["hits"]["hits"]: - for hit in resp["hits"]["hits"]: + while scroll_id and resp.get("hits", {}).get("hits"): + for hit in resp.get("hits", {}).get("hits", []): yield hit - # Default to 0 if the value isn't included in the response - shards_successful = resp["_shards"].get("successful", 0) - shards_skipped = resp["_shards"].get("skipped", 0) - shards_total = resp["_shards"].get("total", 0) + _shards = resp.get("_shards") + + if _shards: + # Default to 0 if the value isn't included in the response + shards_successful = _shards.get("successful", 0) + shards_skipped = _shards.get("skipped", 0) + shards_total = _shards.get("total", 0) # check if we have any errors if (shards_successful + shards_skipped) < shards_total: diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index c6c03d7e..c7f24139 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -586,14 +586,17 @@ def scan( scroll_id = resp.get("_scroll_id") try: - while scroll_id and resp["hits"]["hits"]: - for hit in resp["hits"]["hits"]: + while scroll_id and resp.get("hits", {}).get("hits"): + for hit in resp.get("hits", {}).get("hits", []): yield hit - # Default to 0 if the value isn't included in the response - shards_successful = resp["_shards"].get("successful", 0) - shards_skipped = resp["_shards"].get("skipped", 0) - shards_total = resp["_shards"].get("total", 0) + _shards = resp.get("_shards") + + if _shards: + # Default to 0 if the value isn't included in the response + shards_successful = _shards.get("successful", 0) + shards_skipped = _shards.get("skipped", 0) + shards_total = _shards.get("total", 0) # check if we have any errors if (shards_successful + shards_skipped) < shards_total: @@ -614,6 +617,7 @@ def scan( shards_total, ), ) + resp = client.scroll( body={"scroll_id": scroll_id, "scroll": scroll}, **scroll_kwargs ) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 158df715..c6c54df0 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -776,6 +776,34 @@ async def test_scan_auth_kwargs_favor_scroll_kwargs_option( } assert 
async_client.scroll.call_args[1]["sort"] == "asc" + async def test_async_scan_with_missing_hits_key( + self, async_client: Any, scan_teardown: Any + ) -> None: + with patch.object( + async_client, + "search", + return_value=MockResponse({"_scroll_id": "dummy_scroll_id", "_shards": {}}), + ): + with patch.object( + async_client, + "scroll", + return_value=MockResponse( + {"_scroll_id": "dummy_scroll_id", "_shards": {}} + ), + ): + with patch.object( + async_client, "clear_scroll", return_value=MockResponse({}) + ): + async_scan_result = [ + hit + async for hit in actions.async_scan( + async_client, query={"query": {"match_all": {}}} + ) + ] + assert ( + async_scan_result == [] + ), "Expected empty results when 'hits' key is missing" + @pytest.fixture(scope="function") # type: ignore async def reindex_setup(async_client: Any) -> Any: diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index e8bd3396..c43c7322 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -28,6 +28,7 @@ import threading import time from typing import Any +from unittest.mock import Mock import mock import pytest @@ -270,3 +271,24 @@ def test_string_actions_are_marked_as_simple_inserts(self) -> None: self.assertEqual( ('{"index":{}}', "whatever"), helpers.expand_action("whatever") ) + + +class TestScanFunction(TestCase): + @mock.patch("opensearchpy.OpenSearch.clear_scroll") + @mock.patch("opensearchpy.OpenSearch.scroll") + @mock.patch("opensearchpy.OpenSearch.search") + def test_scan_with_missing_hits_key( + self, mock_search: Mock, mock_scroll: Mock, mock_clear_scroll: Mock + ) -> None: + # Simulate a response where the 'hits' key is missing + mock_search.return_value = {"_scroll_id": "dummy_scroll_id", "_shards": {}} + + mock_scroll.side_effect = [{"_scroll_id": "dummy_scroll_id", "_shards": {}}] + + mock_clear_scroll.return_value = None + + client = OpenSearch() + + # The test should pass without raising a KeyError + scan_result = list(helpers.scan(client, query={"query": {"match_all": {}}})) + assert scan_result == [], "Expected empty results when 'hits' key is missing" From ac16a10af5e1363c8758804221cf9e4bfa1dba88 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 16:50:46 -0500 Subject: [PATCH 73/80] Update pytest-asyncio requirement from <=0.21.1 to <=0.23.2 (#625) * Update pytest-asyncio requirement from <=0.21.1 to <=0.23.2 Updates the requirements on [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.1.1...v0.23.2) --- updated-dependencies: - dependency-name: pytest-asyncio dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] Signed-off-by: roma2023 --- CHANGELOG.md | 4 +++- dev-requirements.txt | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c7ee54a9..fd2ed4a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,8 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Fixed - Fix KeyError when scroll return no hits ([#616](https://github.com/opensearch-project/opensearch-py/pull/616)) ### Security +### Dependencies +- Bumps `pytest-asyncio` from <=0.21.1 to <=0.23.2 ## [2.4.2] ### Added @@ -208,4 +210,4 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) [2.2.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0 [2.1.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.0...v2.1.1 [2.1.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.1...v2.1.0 -[2.0.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.0...v2.0.1 +[2.0.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.0...v2.0.1 \ No newline at end of file diff --git a/dev-requirements.txt b/dev-requirements.txt index a79a1a0b..a256eae6 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -21,5 +21,5 @@ twine # Requirements for testing [async] extra aiohttp -pytest-asyncio<=0.21.1 +pytest-asyncio<=0.23.2 unasync From 3ce75840bf8c76be246763b7b32e013be62dfb83 Mon Sep 17 00:00:00 2001 From: Sai Medhini Reddy Maryada <117196660+saimedhi@users.noreply.github.com> Date: Fri, 15 Dec 2023 07:41:29 -0800 Subject: [PATCH 74/80] Bumps urllib3 from >=1.26.18 to >=1.26.18, <2 (#632) Signed-off-by: saimedhi Signed-off-by: roma2023 --- CHANGELOG.md | 1 + setup.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fd2ed4a7..97f83140 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Security ### Dependencies - Bumps `pytest-asyncio` from <=0.21.1 to <=0.23.2 +- Bumps `urllib3` from >=1.26.18 to >=1.26.18, <2 ([#632](https://github.com/opensearch-project/opensearch-py/pull/632)) ## [2.4.2] ### Added diff --git a/setup.py b/setup.py index 09c8e88c..78cc2a23 100644 --- a/setup.py +++ b/setup.py @@ -52,7 +52,7 @@ if package == MODULE_DIR or package.startswith(MODULE_DIR + ".") ] install_requires = [ - "urllib3>=1.26.18", + "urllib3>=1.26.18, <2", "requests>=2.4.0, <3.0.0", "six", "python-dateutil", From e7f9831e33ab7d4a7b1741365f6a8b0cde09644f Mon Sep 17 00:00:00 2001 From: Mark Cohen Date: Fri, 22 Dec 2023 15:32:48 -0500 Subject: [PATCH 75/80] samples directory now passes the missing-function-docstring linter (#640) * updated files with docstrings to pass pylint Signed-off-by: Mark Cohen * updated samples to prepare for enabling missing-docstring linter; will continue to work on this before committing setup.cfg Signed-off-by: Mark Cohen * removed missing-function-docstring from setup.cfg so the linter doesn't fail while work on docstrings continues Signed-off-by: Mark Cohen * corrected unnecessary return docstring values Signed-off-by: Mark Cohen * fixing failure in 'black' on reformatting Signed-off-by: Mark Cohen --------- Signed-off-by: Mark Cohen Signed-off-by: roma2023 --- 
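Note: the linter referenced above is pylint's missing-function-docstring check (C0116). A minimal sketch of running just that check over the affected directories, assuming a stock pylint install:

    pylint --disable=all --enable=missing-function-docstring samples/ benchmarks/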
benchmarks/bench_async.py | 12 +++++++++ benchmarks/bench_info_sync.py | 7 ++++++ benchmarks/bench_sync.py | 7 ++++++ .../advanced_index_actions_sample.py | 4 +++ samples/aws/search_requests.py | 6 +++++ samples/aws/search_urllib3.py | 10 +++++++- samples/bulk/bulk_array.py | 1 + samples/bulk/bulk_helpers.py | 4 +++ samples/bulk/bulk_ld.py | 3 +++ .../document_lifecycle_sample.py | 3 +++ samples/hello/hello.py | 4 +++ samples/hello/hello_async.py | 4 +++ .../index_template/index_template_sample.py | 25 +++++++++++++------ samples/json/json_hello.py | 3 +++ samples/json/json_hello_async.py | 4 +++ samples/knn/knn_async_basics.py | 3 +++ samples/knn/knn_basics.py | 3 +++ samples/knn/knn_boolean_filter.py | 3 +++ samples/knn/knn_efficient_filter.py | 3 +++ samples/logging/log_collection_sample.py | 15 +++++++++-- samples/security/roles.py | 6 ++--- samples/security/users.py | 6 ++--- 22 files changed, 119 insertions(+), 17 deletions(-) diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index 7e9d1b3d..3df4e0ed 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -17,6 +17,7 @@ async def index_records(client: Any, index_name: str, item_count: int) -> None: + """asynchronously bulk index item_count records into the index (index_name)""" await asyncio.gather( *[ client.index( @@ -34,6 +35,10 @@ async def index_records(client: Any, index_name: str, item_count: int) -> None: async def test_async(client_count: int = 1, item_count: int = 1) -> None: + """ + asynchronously index with item_count records and run client_count clients. This function can be used to + test balancing the number of items indexed with the number of clients. + """ host = "localhost" port = 9200 auth = ("admin", "admin") @@ -74,6 +79,7 @@ async def test_async(client_count: int = 1, item_count: int = 1) -> None: def test(item_count: int = 1, client_count: int = 1) -> None: + """sets up and executes the asynchronous tests""" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(test_async(item_count, client_count)) @@ -84,26 +90,32 @@ def test(item_count: int = 1, client_count: int = 1) -> None: def test_1() -> None: + """run a test for one item and 32*ITEM_COUNT clients""" test(1, 32 * ITEM_COUNT) def test_2() -> None: + """run a test for two items and 16*ITEM_COUNT clients""" test(2, 16 * ITEM_COUNT) def test_4() -> None: + """run a test for four items and 8*ITEM_COUNT clients""" test(4, 8 * ITEM_COUNT) def test_8() -> None: + """run a test for eight items and 4*ITEM_COUNT clients""" test(8, 4 * ITEM_COUNT) def test_16() -> None: + """run a test for 16 items and 2*ITEM_COUNT clients""" test(16, 2 * ITEM_COUNT) def test_32() -> None: + """run a test for 32 items and ITEM_COUNT clients""" test(32, ITEM_COUNT) diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index aba6d024..36e59814 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -21,6 +21,7 @@ def get_info(client: Any, request_count: int) -> float: + """get info from client""" tt: float = 0 for n in range(request_count): start = time.time() * 1000 @@ -31,6 +32,7 @@ def get_info(client: Any, request_count: int) -> float: def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None: + """test fetching cluster info with thread_count threads, request_count requests and client_count clients""" host = "localhost" port = 9200 auth = ("admin", "admin") @@ -79,22 +81,27 @@ def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1)
- def test_1() -> None: + """testing 1 thread""" test(1, 32 * REQUEST_COUNT, 1) def test_2() -> None: + """testing 2 threads""" test(2, 16 * REQUEST_COUNT, 2) def test_4() -> None: + """testing 4 threads""" test(4, 8 * REQUEST_COUNT, 3) def test_8() -> None: + """testing 8 threads""" test(8, 4 * REQUEST_COUNT, 8) def test_32() -> None: + """testing 32 threads""" test(32, REQUEST_COUNT, 32) diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index 0f3c5286..d86085d2 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -22,6 +22,7 @@ def index_records(client: Any, index_name: str, item_count: int) -> Any: + """bulk index item_count records into index_name""" tt = 0 for n in range(10): data: Any = [] @@ -48,6 +49,7 @@ def index_records(client: Any, index_name: str, item_count: int) -> Any: def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None: + """test to index with thread_count threads, item_count records and run client_count clients""" host = "localhost" port = 9200 auth = ("admin", "admin") @@ -118,22 +120,27 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> N def test_1() -> None: + """testing 1 thread""" test(1, 32 * ITEM_COUNT, 1) def test_2() -> None: + """testing 2 threads""" test(2, 16 * ITEM_COUNT, 2) def test_4() -> None: + """testing 4 threads""" test(4, 8 * ITEM_COUNT, 3) def test_8() -> None: + """testing 8 threads""" test(8, 4 * ITEM_COUNT, 8) def test_32() -> None: + """testing 32 threads""" test(32, ITEM_COUNT, 32) diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index b06d82c3..b5af6be4 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -18,6 +18,10 @@ def main() -> None: + """ + demonstrates various functions to operate on the index (e.g. clearing different levels of cache, refreshing the + index) + """ # Set up client = OpenSearch( hosts=["https://localhost:9200"], diff --git a/samples/aws/search_requests.py b/samples/aws/search_requests.py index 544285ac..743d3d96 100644 --- a/samples/aws/search_requests.py +++ b/samples/aws/search_requests.py @@ -20,6 +20,12 @@ def main() -> None: + """ + connects to a cluster specified in environment variables, creates an index, inserts documents, + searches the index, deletes the document, deletes the index. + The environment variables are "ENDPOINT" for the cluster endpoint, AWS_REGION for the region in which the cluster + is hosted, and SERVICE to indicate if this is an ES 7.10.2 compatible cluster + """ # verbose logging logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) diff --git a/samples/aws/search_urllib3.py b/samples/aws/search_urllib3.py index 5ac438d9..c7382d0d 100644 --- a/samples/aws/search_urllib3.py +++ b/samples/aws/search_urllib3.py @@ -20,10 +20,18 @@ def main() -> None: + """ + 1. connects to an OpenSearch cluster on AWS defined by environment variables (i.e. ENDPOINT - cluster endpoint like + my-test-domain.us-east-1.es.amazonaws.com; AWS_REGION like us-east-1, us-west-2; and SERVICE like es which + differentiates between serverless and the managed service. + 2. creates an index called "movies" and adds a single document + 3. queries for that document + 4. deletes the document + 5.
deletes the index + """ # verbose logging logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) - # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com url = urlparse(environ["ENDPOINT"]) region = environ.get("AWS_REGION", "us-east-1") service = environ.get("SERVICE", "es") diff --git a/samples/bulk/bulk_array.py b/samples/bulk/bulk_array.py index cb6dc8b1..a7814ddb 100755 --- a/samples/bulk/bulk_array.py +++ b/samples/bulk/bulk_array.py @@ -17,6 +17,7 @@ def main() -> None: + """demonstrates how to bulk load data into an index""" # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/bulk/bulk_helpers.py b/samples/bulk/bulk_helpers.py index 7371d6b1..02150e25 100755 --- a/samples/bulk/bulk_helpers.py +++ b/samples/bulk/bulk_helpers.py @@ -17,6 +17,10 @@ def main() -> None: + """ + demonstrates how to bulk load data using opensearchpy.helpers including examples of serial, parallel, and streaming + bulk load + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/bulk/bulk_ld.py b/samples/bulk/bulk_ld.py index 89e6f661..a6c3a585 100755 --- a/samples/bulk/bulk_ld.py +++ b/samples/bulk/bulk_ld.py @@ -17,6 +17,9 @@ def main() -> None: + """ + bulk index 100 items and then delete the index + """ # connect to an instance of OpenSearch host = os.getenv("HOST", default="localhost") diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py index 53c7e718..b195262b 100644 --- a/samples/document_lifecycle/document_lifecycle_sample.py +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -17,6 +17,9 @@ def main() -> None: + """ + provides samples for different ways to handle documents including indexing, searching, updating, and deleting + """ # Connect to OpenSearch client = OpenSearch( hosts=["https://localhost:9200"], diff --git a/samples/hello/hello.py b/samples/hello/hello.py index acecf890..39744752 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -16,6 +16,10 @@ def main() -> None: + """ + an example showing how to create a synchronous connection to OpenSearch, create an index, index a document + and search to return the document + """ host = "localhost" port = 9200 auth = ("admin", "admin") # For testing only. Don't store credentials in code. diff --git a/samples/hello/hello_async.py b/samples/hello/hello_async.py index c6a04e02..a3620dba 100755 --- a/samples/hello/hello_async.py +++ b/samples/hello/hello_async.py @@ -16,6 +16,10 @@ async def main() -> None: + """ + an example showing how to create an asynchronous connection to OpenSearch, create an index, index a document + and search to return the document + """ # connect to OpenSearch host = "localhost" port = 9200 diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py index 25cfdddd..00978aba 100644 --- a/samples/index_template/index_template_sample.py +++ b/samples/index_template/index_template_sample.py @@ -12,6 +12,20 @@ def main() -> None: + """ + 1. connects to an OpenSearch instance running on localhost + 2. Creates an index template named `books` with default settings and mappings for indices of + the `books-*` pattern. You can create an index template to define default settings and mappings for indices + of certain patterns. + 3.
When creating an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's + settings and mappings to the index. Create an index named books-nonfiction and verify that its settings and mappings + match those of the template + 4. If multiple index templates match the index's name, OpenSearch will apply the template with the highest + `priority`. In the example, two templates are created with different priorities. + 5. Composable index templates are a new type of index template that allow you to define multiple component templates + and compose them into a final template. The last part of the example before cleaning up creates a component + template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns. + """ # Create a client instance client = OpenSearch( hosts=["https://localhost:9200"], @@ -20,8 +34,7 @@ def main() -> None: http_auth=("admin", "admin"), ) - # You can create an index template to define default settings and mappings for indices of certain patterns. - # The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: + # create an index template client.indices.put_index_template( name="books", body={ @@ -41,13 +54,10 @@ def main() -> None: }, ) - # Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. - # Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: + # create the index which applies the index template settings matched by pattern client.indices.create(index="books-nonfiction") print(client.indices.get(index="books-nonfiction")) - # If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. - # The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: client.indices.put_index_template( name="books", body={ @@ -74,8 +84,6 @@ def main() -> None: client.indices.create(index="books-fiction-romance") print(client.indices.get(index="books-fiction-romance")) - # Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. 
-    # The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns:
     client.cluster.put_component_template(
         name="books_mappings",
         body={
@@ -92,6 +100,7 @@ def main() -> None:
         },
     )

+    # composable index templates
     client.indices.put_index_template(
         name="books",
         body={
diff --git a/samples/json/json_hello.py b/samples/json/json_hello.py
index c0e537ec..9a8c213f 100755
--- a/samples/json/json_hello.py
+++ b/samples/json/json_hello.py
@@ -14,6 +14,9 @@


 def main() -> None:
+    """
+    demonstrates how to index a document using a dict
+    """
     # connect to OpenSearch

     host = "localhost"
diff --git a/samples/json/json_hello_async.py b/samples/json/json_hello_async.py
index afe5065f..499def22 100755
--- a/samples/json/json_hello_async.py
+++ b/samples/json/json_hello_async.py
@@ -16,6 +16,10 @@


 async def main() -> None:
+    """
+    this sample uses asyncio and AsyncOpenSearch to asynchronously connect to a local OpenSearch cluster, create an
+    index, index data, search the index, delete the document, and delete the index
+    """
     # connect to OpenSearch
     host = "localhost"
     port = 9200
diff --git a/samples/knn/knn_async_basics.py b/samples/knn/knn_async_basics.py
index 273015c2..8847f924 100755
--- a/samples/knn/knn_async_basics.py
+++ b/samples/knn/knn_async_basics.py
@@ -18,6 +18,9 @@


 async def main() -> None:
+    """
+    asynchronously create, bulk index, and query kNN, then delete the index
+    """
     # connect to an instance of OpenSearch
     host = os.getenv("HOST", default="localhost")
     port = int(os.getenv("PORT", 9200))
diff --git a/samples/knn/knn_basics.py b/samples/knn/knn_basics.py
index 4ea49a21..b3cdfca4 100755
--- a/samples/knn/knn_basics.py
+++ b/samples/knn/knn_basics.py
@@ -17,6 +17,9 @@


 def main() -> None:
+    """
+    create, bulk index, and query kNN, then delete the index
+    """
     # connect to an instance of OpenSearch

     host = os.getenv("HOST", default="localhost")
diff --git a/samples/knn/knn_boolean_filter.py b/samples/knn/knn_boolean_filter.py
index 156fcf86..40b5434b 100755
--- a/samples/knn/knn_boolean_filter.py
+++ b/samples/knn/knn_boolean_filter.py
@@ -17,6 +17,9 @@


 def main() -> None:
+    """
+    create, query, and delete a kNN index
+    """
     # connect to an instance of OpenSearch

     host = os.getenv("HOST", default="localhost")
diff --git a/samples/knn/knn_efficient_filter.py b/samples/knn/knn_efficient_filter.py
index 7777173d..4c23a43e 100755
--- a/samples/knn/knn_efficient_filter.py
+++ b/samples/knn/knn_efficient_filter.py
@@ -16,6 +16,9 @@


 def main() -> None:
+    """
+    create a kNN index using Lucene kNN and query it using filters
+    """
     # connect to an instance of OpenSearch

     host = os.getenv("HOST", default="localhost")
diff --git a/samples/logging/log_collection_sample.py b/samples/logging/log_collection_sample.py
index fbf25b60..84ff0194 100644
--- a/samples/logging/log_collection_sample.py
+++ b/samples/logging/log_collection_sample.py
@@ -23,6 +23,10 @@


 def main() -> None:
+    """
+    sample for custom logging; this shows how to create a console handler, connect to OpenSearch, define a custom
+    logger, and log to an OpenSearch index
+    """
     print("Collecting logs.")

     # Create a console handler
@@ -47,15 +51,22 @@ def main() -> None:
     # Add console handler to the logger
     os_logger.addHandler(console_handler)

-    # Define a custom handler that logs to OpenSearch
     class OpenSearchHandler(logging.Handler):
+        """
+        define a custom handler that logs to OpenSearch
+        """
+
         # Initializer / Instance attributes
         def __init__(self, opensearch_client: Any) -> None:
             super().__init__()
             self.os_client = opensearch_client

-        # Build index name (e.g., "logs-YYYY-MM-DD")
         def _build_index_name(self) -> str:
+            """
+            Build index name (e.g., "logs-YYYY-MM-DD")
+            :rtype: str
+            :return: a str with date formatted as 'logs-YYYY-MM-DD'
+            """
             return f"logs-{datetime.date(datetime.now())}"

         # Emit logs to the OpenSearch cluster
diff --git a/samples/security/roles.py b/samples/security/roles.py
index 7628a9f4..53508634 100644
--- a/samples/security/roles.py
+++ b/samples/security/roles.py
@@ -9,13 +9,13 @@
 # Modifications Copyright OpenSearch Contributors. See
 # GitHub history for details.

-
-# A basic OpenSearch sample that create and manage roles.
-
 from opensearchpy import OpenSearch


 def main() -> None:
+    """
+    A basic OpenSearch sample that creates and manages roles.
+    """
     # connect to OpenSearch

     host = "localhost"
diff --git a/samples/security/users.py b/samples/security/users.py
index 7b89a37f..54f4d36e 100644
--- a/samples/security/users.py
+++ b/samples/security/users.py
@@ -9,13 +9,13 @@
 # Modifications Copyright OpenSearch Contributors. See
 # GitHub history for details.

-
-# A basic OpenSearch sample that create and manage users.
-
 from opensearchpy import OpenSearch


 def main() -> None:
+    """
+    A basic OpenSearch sample that creates and manages users.
+ """ # connect to OpenSearch host = "localhost" From 674f14e4bea4db52ddd0f28c3c35ea547cb91190 Mon Sep 17 00:00:00 2001 From: roma2023 Date: Thu, 28 Dec 2023 13:04:31 +0600 Subject: [PATCH 76/80] format snapshot_sample.py signed-off-by: roma2023 Signed-off-by: roma2023 --- samples/snapshot/snapshot_sample.py | 1 + 1 file changed, 1 insertion(+) diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py index ac4e85bb..904a1080 100644 --- a/samples/snapshot/snapshot_sample.py +++ b/samples/snapshot/snapshot_sample.py @@ -6,6 +6,7 @@ # compatible open source license. import tempfile + from opensearchpy import OpenSearch # connect to OpenSearch From 1448208adac06d55eac35283f13413be6f436735 Mon Sep 17 00:00:00 2001 From: roma2023 Date: Thu, 28 Dec 2023 13:22:51 +0600 Subject: [PATCH 77/80] format snapshot_sample.py signed-off-by: roma2023 Signed-off-by: roma2023 --- samples/snapshot/snapshot_sample.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py index 904a1080..73f2e2f3 100644 --- a/samples/snapshot/snapshot_sample.py +++ b/samples/snapshot/snapshot_sample.py @@ -5,18 +5,18 @@ # this file be licensed under the Apache-2.0 license or a # compatible open source license. -import tempfile - from opensearchpy import OpenSearch +import tempfile + # connect to OpenSearch -host = 'localhost' +host = "localhost" port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ("admin", "admin") # For testing only. Don't store credentials in code. client = OpenSearch( - hosts=[{'host': host, 'port': port}], + hosts=[{"host": host, "port": port}], http_auth=auth, use_ssl=True, verify_certs=False, @@ -40,14 +40,14 @@ } } -repository_name = 'my_repository' +repository_name = "my_repository" response = client.snapshot.create_repository(repository=repository_name, body=repo_body) print(response) # Create a snapshot -snapshot_name = 'my_snapshot' +snapshot_name = "my_snapshot" response = client.snapshot.create(repository=repository_name, snapshot=snapshot_name, body={"indices": index_name}) print(response) From 46ee9c1453d025c597bbd6290dcff7fbeb32ddef Mon Sep 17 00:00:00 2001 From: roma2023 Date: Thu, 28 Dec 2023 13:25:23 +0600 Subject: [PATCH 78/80] format snapshot_sample.py signed-off-by: roma2023 Signed-off-by: roma2023 --- samples/snapshot/snapshot_sample.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py index 73f2e2f3..8d52a83b 100644 --- a/samples/snapshot/snapshot_sample.py +++ b/samples/snapshot/snapshot_sample.py @@ -5,10 +5,10 @@ # this file be licensed under the Apache-2.0 license or a # compatible open source license. 
-from opensearchpy import OpenSearch - import tempfile +from opensearchpy import OpenSearch + # connect to OpenSearch host = "localhost" @@ -20,7 +20,7 @@ http_auth=auth, use_ssl=True, verify_certs=False, - ssl_show_warn=False + ssl_show_warn=False, ) # Create an index @@ -37,7 +37,7 @@ "type": "fs", # Replace 'fs' with the appropriate repository type "settings": { "location": temp_repo_location, # Replace with the desired repository location - } + }, } repository_name = "my_repository" @@ -48,7 +48,9 @@ # Create a snapshot snapshot_name = "my_snapshot" -response = client.snapshot.create(repository=repository_name, snapshot=snapshot_name, body={"indices": index_name}) +response = client.snapshot.create( + repository=repository_name, snapshot=snapshot_name, body={"indices": index_name} +) print(response) From 9656c2732ba5e18db5b18bfdf1c722701a59c161 Mon Sep 17 00:00:00 2001 From: roma2023 Date: Thu, 28 Dec 2023 13:36:12 +0600 Subject: [PATCH 79/80] lint snapshot_sample.py Signed-off-by: roma2023 --- samples/snapshot/snapshot_sample.py | 30 ++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py index 8d52a83b..835388fe 100644 --- a/samples/snapshot/snapshot_sample.py +++ b/samples/snapshot/snapshot_sample.py @@ -11,12 +11,12 @@ # connect to OpenSearch -host = "localhost" -port = 9200 +HOST = "localhost" +PORT = 9200 auth = ("admin", "admin") # For testing only. Don't store credentials in code. client = OpenSearch( - hosts=[{"host": host, "port": port}], + hosts=[{"host": HOST, "port": PORT}], http_auth=auth, use_ssl=True, verify_certs=False, @@ -25,46 +25,46 @@ # Create an index -index_name = "test-snapshot" -client.indices.create(index=index_name) +INDEX_NAME = "test-snapshot" +client.indices.create(index=INDEX_NAME) # Create a temporary directory for the snapshot repository temp_repo = tempfile.TemporaryDirectory() -temp_repo_location = "/usr/share/opensearch/backups" +TEMP_REPO_LOCATION = "/usr/share/opensearch/backups" # Define the repository body with the temporary location repo_body = { "type": "fs", # Replace 'fs' with the appropriate repository type "settings": { - "location": temp_repo_location, # Replace with the desired repository location + "location": TEMP_REPO_LOCATION, # Replace with the desired repository location }, } -repository_name = "my_repository" -response = client.snapshot.create_repository(repository=repository_name, body=repo_body) +REPOSITORY_NAME = "my_repository" +response = client.snapshot.create_repository(repository=REPOSITORY_NAME, body=repo_body) print(response) # Create a snapshot -snapshot_name = "my_snapshot" +SNAPSHOT_NAME = "my_snapshot" response = client.snapshot.create( - repository=repository_name, snapshot=snapshot_name, body={"indices": index_name} + repository=REPOSITORY_NAME, snapshot=SNAPSHOT_NAME, body={"indices": INDEX_NAME} ) print(response) # Get Snapshot Information -snapshot_info = client.snapshot.get(repository=repository_name, snapshot=snapshot_name) +snapshot_info = client.snapshot.get(repository=REPOSITORY_NAME, snapshot=SNAPSHOT_NAME) print(snapshot_info) # Clean up - Delete Snapshot and Repository -client.snapshot.delete(repository=repository_name, snapshot=snapshot_name) -client.snapshot.delete_repository(repository=repository_name) +client.snapshot.delete(repository=REPOSITORY_NAME, snapshot=SNAPSHOT_NAME) +client.snapshot.delete_repository(repository=REPOSITORY_NAME) # Clean up - Delete Index 
-client.indices.delete(index=index_name) +client.indices.delete(index=INDEX_NAME) From b22a37be8a70ae951eb2d657c38b64cf46c54290 Mon Sep 17 00:00:00 2001 From: roma2023 Date: Thu, 28 Dec 2023 20:28:27 +0600 Subject: [PATCH 80/80] added license header Signed-off-by: roma2023 --- samples/snapshot/snapshot_sample.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/samples/snapshot/snapshot_sample.py b/samples/snapshot/snapshot_sample.py index 835388fe..b8081ba1 100644 --- a/samples/snapshot/snapshot_sample.py +++ b/samples/snapshot/snapshot_sample.py @@ -1,9 +1,13 @@ #!/usr/bin/env python + # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. import tempfile